"""
AIVA Circadian Loop - PM-033

24-hour deep consolidation cycle.
Merges knowledge, generates daily summaries, and archives old context.
"""

import asyncio
import json
import logging
import os
import time
from dataclasses import asdict, dataclass, field
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


@dataclass
class DailySummary:
    """Summary of a day's activities.

    Built once per circadian cycle from the rank tracker, reflection
    loop, and strategic loop; counters are zero and lists empty when a
    source component is not attached.
    """
    date: str                    # UTC date, "YYYY-MM-DD"
    tasks_completed: int         # from rank_tracker.metrics
    tasks_failed: int            # from rank_tracker.metrics
    success_rate: float          # fraction; rendered with "%" formatting
    learnings_count: int         # total learnings reported by the reflection loop
    key_learnings: List[str]     # up to 10 learnings from recent reflections
    decisions_made: int          # total decisions reported by the strategic loop
    revenue_generated: float     # dollars, from rank_tracker.metrics
    highlights: List[str]        # notable positives for the day
    concerns: List[str]          # notable issues (including cycle errors)


@dataclass
class CircadianCycleEntry:
    """Record of one completed (or failed) circadian cycle."""
    cycle_id: str                  # "circ_<epoch-millis>"
    timestamp: str                 # ISO-8601 UTC time the cycle finished
    daily_summary: DailySummary    # summary produced during this cycle
    knowledge_merged: int          # count returned by _merge_knowledge
    contexts_archived: int         # count returned by _archive_old_contexts
    embeddings_consolidated: int   # count returned by _consolidate_embeddings
    duration_ms: float             # wall-clock duration of the cycle


class CircadianLoop:
    """
    24-hour deep consolidation loop for AIVA.

    Performs deep knowledge merging, generates daily summaries,
    and archives old context to prevent context rot.

    Usage:
        loop = CircadianLoop(memory_bridge)
        await loop.start()
    """

    def __init__(
        self,
        memory_bridge=None,
        rank_tracker=None,
        reflection_loop=None,
        strategic_loop=None,
        interval_seconds: int = 86400,  # 24 hours
        log_dir: str = "logs"
    ):
        """
        Initialize the circadian loop.

        Args:
            memory_bridge: MemoryBridge for storage operations
            rank_tracker: RankTracker for metrics
            reflection_loop: ReflectionLoop for learnings
            strategic_loop: StrategicLoop for decisions
            interval_seconds: Cycle interval (default 24 hours)
            log_dir: Directory for logs (created if missing)
        """
        self.memory_bridge = memory_bridge
        self.rank_tracker = rank_tracker
        self.reflection_loop = reflection_loop
        self.strategic_loop = strategic_loop
        self.interval_seconds = interval_seconds
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(parents=True, exist_ok=True)

        self._running = False
        self._task: Optional[asyncio.Task] = None
        self.cycle_history: List[CircadianCycleEntry] = []
        self.last_consolidation: Optional[datetime] = None

        logger.info(f"CircadianLoop initialized: interval={interval_seconds}s")

    async def start(self) -> None:
        """Start the background circadian loop (idempotent)."""
        if self._running:
            logger.warning("Circadian loop already running")
            return

        self._running = True
        self._task = asyncio.create_task(self._loop())
        logger.info("Circadian loop started")

    async def stop(self) -> None:
        """Stop the circadian loop and wait for the task to exit."""
        self._running = False
        if self._task:
            self._task.cancel()
            try:
                await self._task
            except asyncio.CancelledError:
                pass  # expected when the task is cancelled
        logger.info("Circadian loop stopped")

    async def _loop(self) -> None:
        """Main loop: consolidate, then sleep for the configured interval."""
        while self._running:
            try:
                await self.deep_consolidate()
                await asyncio.sleep(self.interval_seconds)
            except asyncio.CancelledError:
                break
            except Exception as e:
                # deep_consolidate() guards its own phases, so this mostly
                # catches unexpected infrastructure failures. Keep the
                # traceback in the log.
                logger.exception(f"Circadian loop error: {e}")
                await asyncio.sleep(3600)  # Wait 1 hour before retry

    async def deep_consolidate(self) -> CircadianCycleEntry:
        """
        Execute a deep consolidation cycle.

        Runs the four consolidation phases (daily summary, knowledge
        merge, context archival, embedding consolidation) and persists
        the results. A failing phase does not raise: an otherwise-empty
        summary carrying the error in its concerns is recorded instead.

        Returns:
            CircadianCycleEntry with results
        """
        start_time = time.time()
        cycle_id = f"circ_{int(time.time() * 1000)}"

        logger.info(f"Starting circadian cycle {cycle_id}")

        knowledge_merged = 0
        contexts_archived = 0
        embeddings_consolidated = 0

        try:
            # 1. Generate daily summary
            daily_summary = await self._generate_daily_summary()

            # 2. Merge knowledge from the day
            knowledge_merged = await self._merge_knowledge()

            # 3. Archive old contexts
            contexts_archived = await self._archive_old_contexts()

            # 4. Consolidate embeddings
            embeddings_consolidated = await self._consolidate_embeddings()

            # 5. Persist the summary
            await self._persist_summary(daily_summary)

            self.last_consolidation = datetime.now(timezone.utc)

        except Exception as e:
            logger.exception(f"Circadian cycle failed: {e}")
            # Record the failure in an empty summary so it remains
            # visible in the persisted cycle history.
            daily_summary = DailySummary(
                date=datetime.now(timezone.utc).strftime("%Y-%m-%d"),
                tasks_completed=0,
                tasks_failed=0,
                success_rate=0.0,
                learnings_count=0,
                key_learnings=[],
                decisions_made=0,
                revenue_generated=0.0,
                highlights=[],
                concerns=[f"Circadian cycle error: {e}"]
            )

        duration = (time.time() - start_time) * 1000

        entry = CircadianCycleEntry(
            cycle_id=cycle_id,
            timestamp=datetime.now(timezone.utc).isoformat(),
            daily_summary=daily_summary,
            knowledge_merged=knowledge_merged,
            contexts_archived=contexts_archived,
            embeddings_consolidated=embeddings_consolidated,
            duration_ms=duration
        )

        self.cycle_history.append(entry)
        self._log_cycle(entry)

        logger.info(
            f"Circadian cycle {cycle_id} complete: "
            f"{knowledge_merged} merged, {contexts_archived} archived "
            f"in {duration:.0f}ms"
        )

        return entry

    async def _generate_daily_summary(self) -> DailySummary:
        """Generate summary of the day's activities.

        Pulls counters from whichever of rank_tracker / reflection_loop /
        strategic_loop are attached; missing components contribute zeros.
        """
        today = datetime.now(timezone.utc).strftime("%Y-%m-%d")

        # Gather metrics from rank tracker
        tasks_completed = 0
        tasks_failed = 0
        success_rate = 0.0
        revenue_generated = 0.0

        if self.rank_tracker:
            tasks_completed = self.rank_tracker.metrics.tasks_completed
            tasks_failed = self.rank_tracker.metrics.tasks_failed
            success_rate = self.rank_tracker.metrics.success_rate
            revenue_generated = self.rank_tracker.metrics.revenue_generated

        # Gather learnings from reflection loop
        learnings = []
        learnings_count = 0
        if self.reflection_loop:
            stats = self.reflection_loop.get_stats()
            learnings_count = stats.get("total_learnings", 0)
            # limit=24: presumably one reflection per hour — TODO confirm
            recent = self.reflection_loop.get_recent_reflections(limit=24)
            for r in recent:
                learnings.extend(r.get("learnings", []))

        # Gather decisions from strategic loop
        decisions_made = 0
        if self.strategic_loop:
            stats = self.strategic_loop.get_stats()
            decisions_made = stats.get("total_decisions", 0)

        # Identify highlights and concerns
        highlights = []
        concerns = []

        if success_rate >= 0.9:
            highlights.append(f"High success rate: {success_rate:.1%}")
        elif success_rate < 0.7:
            concerns.append(f"Low success rate: {success_rate:.1%}")

        if revenue_generated > 0:
            highlights.append(f"Revenue generated: ${revenue_generated:.2f}")

        if tasks_completed > 100:
            highlights.append(f"High productivity: {tasks_completed} tasks completed")

        return DailySummary(
            date=today,
            tasks_completed=tasks_completed,
            tasks_failed=tasks_failed,
            success_rate=success_rate,
            learnings_count=learnings_count,
            key_learnings=learnings[:10],  # Top 10 learnings
            decisions_made=decisions_made,
            revenue_generated=revenue_generated,
            highlights=highlights,
            concerns=concerns
        )

    async def _merge_knowledge(self) -> int:
        """Merge knowledge accumulated during the day.

        Returns:
            Number of knowledge items merged (0 until implemented).
        """
        if not self.memory_bridge:
            return 0

        # In production, this would:
        # 1. Deduplicate entities
        # 2. Merge similar axioms
        # 3. Update relationship strengths
        logger.debug("Merging knowledge...")
        return 0  # Placeholder

    async def _archive_old_contexts(self) -> int:
        """Archive contexts older than threshold.

        Returns:
            Number of contexts archived (0 until implemented).
        """
        if not self.memory_bridge:
            return 0

        # In production, this would:
        # 1. Identify contexts older than X days
        # 2. Summarize and compress
        # 3. Move to cold storage
        logger.debug("Archiving old contexts...")
        return 0  # Placeholder

    async def _consolidate_embeddings(self) -> int:
        """Consolidate and optimize embeddings.

        Returns:
            Number of embeddings consolidated (0 until implemented).
        """
        if not self.memory_bridge:
            return 0

        # In production, this would:
        # 1. Remove redundant embeddings
        # 2. Re-index for performance
        # 3. Update similarity cache
        logger.debug("Consolidating embeddings...")
        return 0  # Placeholder

    async def _persist_summary(self, summary: DailySummary) -> None:
        """Persist daily summary to a JSON file and (best-effort) to the
        memory bridge. Failures are logged, never raised."""
        summary_file = self.log_dir / f"daily_summary_{summary.date}.json"
        try:
            with open(summary_file, "w") as f:
                json.dump(asdict(summary), f, indent=2)
            logger.info(f"Daily summary saved: {summary_file}")
        except Exception as e:
            logger.error(f"Failed to save daily summary: {e}")

        # Also store in memory bridge
        if self.memory_bridge:
            try:
                self.memory_bridge.store_memory(
                    content=asdict(summary),
                    memory_type="entity",
                    metadata={
                        "entity_type": "daily_summary",
                        "name": f"Summary {summary.date}",
                        "date": summary.date
                    }
                )
            except Exception as e:
                logger.error(f"Failed to store summary in memory: {e}")

    def _log_cycle(self, entry: CircadianCycleEntry) -> None:
        """Append the cycle (as one JSON line) to the circadian log."""
        log_file = self.log_dir / "circadian_loop.jsonl"
        try:
            # asdict() converts nested dataclasses recursively, so the
            # daily_summary field is already a plain dict.
            data = asdict(entry)
            with open(log_file, "a") as f:
                f.write(json.dumps(data) + "\n")
        except Exception as e:
            logger.error(f"Failed to log cycle: {e}")

    def get_recent_cycles(self, limit: int = 7) -> List[Dict]:
        """Return up to `limit` most recent cycle entries as plain dicts."""
        # asdict() is recursive: daily_summary comes back as a dict too.
        return [asdict(c) for c in self.cycle_history[-limit:]]

    def get_stats(self) -> Dict:
        """Get circadian loop statistics.

        Returns:
            Dict with cycle counts/aggregates; minimal keys when no
            cycle has run yet.
        """
        if not self.cycle_history:
            return {
                "total_cycles": 0,
                "last_consolidation": None,
                "is_running": self._running
            }

        return {
            "total_cycles": len(self.cycle_history),
            "total_knowledge_merged": sum(c.knowledge_merged for c in self.cycle_history),
            "total_contexts_archived": sum(c.contexts_archived for c in self.cycle_history),
            "avg_duration_ms": sum(c.duration_ms for c in self.cycle_history) / len(self.cycle_history),
            "last_consolidation": self.last_consolidation.isoformat() if self.last_consolidation else None,
            "is_running": self._running
        }


# Module-level singleton, lazily constructed by get_circadian_loop().
_circadian_loop: Optional[CircadianLoop] = None


def get_circadian_loop(**kwargs) -> CircadianLoop:
    """Return the shared CircadianLoop, creating it on first use.

    Keyword arguments are forwarded to the CircadianLoop constructor
    only on the first call; later calls return the existing instance.
    """
    global _circadian_loop
    if _circadian_loop is not None:
        return _circadian_loop
    _circadian_loop = CircadianLoop(**kwargs)
    return _circadian_loop


if __name__ == "__main__":
    # Demo entry point: run a single consolidation cycle and print the
    # results. asyncio is already imported at module level.

    async def main():
        """Run one circadian cycle against a loop with no components."""
        loop = CircadianLoop()

        # Run one consolidation cycle
        entry = await loop.deep_consolidate()

        print("\nCircadian Cycle Complete:")
        print(f"  ID: {entry.cycle_id}")
        print(f"  Knowledge Merged: {entry.knowledge_merged}")
        print(f"  Contexts Archived: {entry.contexts_archived}")
        print(f"  Duration: {entry.duration_ms:.0f}ms")

        summary = entry.daily_summary
        print(f"\nDaily Summary ({summary.date}):")
        print(f"  Tasks: {summary.tasks_completed} completed, {summary.tasks_failed} failed")
        print(f"  Success Rate: {summary.success_rate:.1%}")
        print(f"  Learnings: {summary.learnings_count}")
        print(f"  Revenue: ${summary.revenue_generated:.2f}")

        if summary.highlights:
            print("\nHighlights:")
            for h in summary.highlights:
                print(f"  + {h}")

        if summary.concerns:
            print("\nConcerns:")
            for c in summary.concerns:
                print(f"  ! {c}")

        print(f"\nStats: {loop.get_stats()}")

    asyncio.run(main())
