#!/usr/bin/env python3
"""
GENESIS RESEARCH SCHEDULER
===========================
Intelligent scheduling of research tasks to maximize API utilization.

Fills idle capacity with AI research intelligence gathering while
ensuring production tasks always have priority.

Features:
- Monitors rate limit utilization
- Schedules research during idle capacity
- Balances research vs production workload
- Runs research during off-peak hours
- Integrates with RWL for task execution

Usage:
    scheduler = ResearchScheduler()

    # Check if research should run
    if scheduler.should_run_research():
        scheduler.run_research_cycle()

    # Or run as background daemon
    scheduler.start_daemon()
"""

import json
import time
import threading
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional, Any, Callable
from dataclasses import dataclass, field
import sys

# Add parent to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))

from core.gemini_rate_maximizer import GeminiRateMaximizer, TaskType


@dataclass
class SchedulerConfig:
    """Scheduler tuning knobs; defaults may be overridden by the JSON config file."""
    enabled: bool = True                    # master on/off switch for research scheduling
    min_idle_capacity: float = 0.20         # at least this fraction of API capacity must be free to research
    max_research_utilization: float = 0.30  # research may consume at most this fraction of capacity
    scan_interval_minutes: int = 30         # minimum minutes between research cycles (shortened off-peak)
    off_peak_start_hour: int = 22           # off-peak window start, local time (10 PM)
    off_peak_end_hour: int = 6              # off-peak window end, local time (6 AM); window may span midnight
    off_peak_boost: float = 1.5             # multiplier on research budget/frequency during off-peak hours


@dataclass
class SchedulerStatus:
    """Current scheduler status; persisted to disk between runs as JSON."""
    running: bool = False                        # whether the background daemon thread is active
    last_research_run: Optional[str] = None      # ISO-8601 timestamp of the last completed cycle
    research_runs_today: int = 0                 # cycles completed since the last daily reset
    insights_harvested_today: int = 0            # cumulative insights reported by cycles today
    tasks_generated_today: int = 0               # cumulative tasks reported by cycles today
    current_utilization: float = 0.0             # last observed total API utilization (0.0-1.0)
    next_scheduled_run: Optional[str] = None     # ISO-8601 timestamp of the next planned cycle


class ResearchScheduler:
    """
    Intelligent research task scheduler.

    Monitors API utilization (via GeminiRateMaximizer) and schedules research
    during idle periods, boosting research budget/frequency during off-peak
    hours. Can run one cycle on demand or as a background daemon thread.
    """

    # Well-known deployment file locations (Windows drive paths).
    CONFIG_PATH = Path("E:/genesis-system/config/gemini_rate_limits.json")
    STATUS_PATH = Path("E:/genesis-system/data/research_scheduler_status.json")
    LOG_PATH = Path("E:/genesis-system/logs/research_scheduler.jsonl")

    def __init__(self):
        self.rate_maximizer = GeminiRateMaximizer()
        self.config = self._load_config()
        self.status = self._load_status()
        self._daemon_thread: Optional[threading.Thread] = None
        self._stop_event = threading.Event()
        self._callbacks: List[Callable] = []

    def _load_config(self) -> "SchedulerConfig":
        """Load scheduler configuration from CONFIG_PATH, falling back to defaults.

        Fix: also honors the off-peak settings from the config file — the
        original read only four of the seven fields and silently ignored any
        configured off-peak window/boost.
        """
        if not self.CONFIG_PATH.exists():
            return SchedulerConfig()
        with open(self.CONFIG_PATH, encoding="utf-8") as f:
            data = json.load(f)
        rc = data.get("research_config", {})
        defaults = SchedulerConfig()
        return SchedulerConfig(
            enabled=rc.get("enabled", defaults.enabled),
            min_idle_capacity=rc.get("min_capacity_for_research", defaults.min_idle_capacity),
            max_research_utilization=rc.get("max_research_percentage", defaults.max_research_utilization),
            scan_interval_minutes=rc.get("scan_interval_minutes", defaults.scan_interval_minutes),
            off_peak_start_hour=rc.get("off_peak_start_hour", defaults.off_peak_start_hour),
            off_peak_end_hour=rc.get("off_peak_end_hour", defaults.off_peak_end_hour),
            off_peak_boost=rc.get("off_peak_boost", defaults.off_peak_boost),
        )

    def _load_status(self) -> "SchedulerStatus":
        """Load persisted scheduler status, tolerating schema drift and corruption.

        Fix: the original passed the raw JSON dict straight to
        ``SchedulerStatus(**data)``, so an unknown key (written by another
        version) raised TypeError and a corrupt file raised JSONDecodeError,
        either of which crashed ``__init__``. Unknown keys are now dropped and
        an unreadable file yields a fresh default status.
        """
        if not self.STATUS_PATH.exists():
            return SchedulerStatus()
        try:
            with open(self.STATUS_PATH, encoding="utf-8") as f:
                data = json.load(f)
        except (json.JSONDecodeError, OSError):
            return SchedulerStatus()
        known = SchedulerStatus.__dataclass_fields__
        return SchedulerStatus(**{k: v for k, v in data.items() if k in known})

    def _save_status(self):
        """Persist current status to STATUS_PATH as pretty-printed JSON.

        Serializes the dataclass field-for-field instead of hand-listing every
        attribute, so new fields cannot silently fall out of sync.
        """
        self.STATUS_PATH.parent.mkdir(parents=True, exist_ok=True)
        with open(self.STATUS_PATH, "w", encoding="utf-8") as f:
            json.dump(dict(vars(self.status)), f, indent=2)

    def _log(self, event: str, data: Optional[Dict] = None):
        """Append a structured event record to the JSONL log file.

        Fix: annotation was ``data: Dict = None``; a None default requires
        ``Optional[Dict]``.
        """
        self.LOG_PATH.parent.mkdir(parents=True, exist_ok=True)
        entry = {
            "timestamp": datetime.now().isoformat(),
            "event": event,
            "data": data or {},
        }
        with open(self.LOG_PATH, "a", encoding="utf-8") as f:
            f.write(json.dumps(entry) + "\n")

    def is_off_peak(self) -> bool:
        """Return True if the current local hour falls in the off-peak window.

        Handles windows that span midnight (e.g. 22 -> 6) as well as
        same-day windows (e.g. 1 -> 5).
        """
        hour = datetime.now().hour
        if self.config.off_peak_start_hour > self.config.off_peak_end_hour:
            # Window spans midnight (e.g. 22-6): in-window before end OR after start.
            return hour >= self.config.off_peak_start_hour or hour < self.config.off_peak_end_hour
        else:
            return self.config.off_peak_start_hour <= hour < self.config.off_peak_end_hour

    def get_available_capacity(self) -> Dict[str, Any]:
        """
        Get available capacity for research.

        Returns a dict with total utilization, free capacity, the research
        budget (capped by config and boosted off-peak), the best model per
        the rate maximizer, and whether research may run at all.
        """
        report = self.rate_maximizer.get_utilization_report()

        # Fraction of total capacity currently unused.
        total_available = 1.0 - report.total_capacity_used

        # Off-peak hours allow a larger research budget. Evaluate once and
        # reuse (the original called is_off_peak() twice).
        off_peak = self.is_off_peak()
        effective_max = self.config.max_research_utilization
        if off_peak:
            effective_max *= self.config.off_peak_boost

        # Research may use whatever is free, up to the (possibly boosted) cap.
        research_capacity = min(total_available, effective_max)

        return {
            "total_utilization": report.total_capacity_used,
            "available_capacity": total_available,
            "research_capacity": research_capacity,
            "is_off_peak": off_peak,
            "best_model": report.best_model,
            "can_research": total_available >= self.config.min_idle_capacity
        }

    def should_run_research(self) -> tuple[bool, str]:
        """
        Determine if research should run now.

        Checks, in order: the enabled flag, available capacity, and the
        minimum interval since the last run (shortened during off-peak).

        Returns:
            (should_run, reason) — reason is a short machine-readable tag.
        """
        if not self.config.enabled:
            return False, "research_disabled"

        capacity = self.get_available_capacity()

        if not capacity["can_research"]:
            return False, f"insufficient_capacity_{capacity['available_capacity']:.1%}"

        # Enforce a minimum spacing between cycles.
        if self.status.last_research_run:
            last_run = datetime.fromisoformat(self.status.last_research_run)
            minutes_since = (datetime.now() - last_run).total_seconds() / 60

            # Off-peak cycles may run more frequently (interval divided by boost).
            interval = self.config.scan_interval_minutes
            if self.is_off_peak():
                interval = interval / self.config.off_peak_boost

            if minutes_since < interval:
                return False, f"too_soon_{int(interval - minutes_since)}min_remaining"

        return True, f"ready_capacity_{capacity['research_capacity']:.1%}"

    def run_research_cycle(self) -> Dict[str, Any]:
        """
        Execute one research cycle via AIResearchScout.

        Updates the persisted status (run counts, next scheduled time) and
        notifies registered callbacks. Never raises: failures are logged and
        returned as ``{"error": ...}``.

        Returns:
            The scout's result summary, or an error dict on failure.
        """
        self._log("research_cycle_start")

        try:
            # Imported lazily so a missing/broken scout module only affects
            # research cycles, not scheduler construction.
            from skills.ai_research_scout import AIResearchScout

            scout = AIResearchScout()
            result = scout.run_scan_cycle()

            # Record outcome in daily counters.
            self.status.last_research_run = datetime.now().isoformat()
            self.status.research_runs_today += 1
            self.status.insights_harvested_today += result.get("total_insights", 0)
            self.status.tasks_generated_today += result.get("tasks_generated", 0)

            # Schedule the next run (sooner during off-peak hours).
            interval = self.config.scan_interval_minutes
            if self.is_off_peak():
                interval = int(interval / self.config.off_peak_boost)
            self.status.next_scheduled_run = (
                datetime.now() + timedelta(minutes=interval)
            ).isoformat()

            self._save_status()
            self._log("research_cycle_complete", result)

            # Notify callbacks; one failing callback must not block the rest.
            for callback in self._callbacks:
                try:
                    callback("research_complete", result)
                except Exception as e:
                    self._log("callback_error", {"error": str(e)})

            return result

        except ImportError as e:
            self._log("research_cycle_error", {"error": f"Import error: {e}"})
            return {"error": str(e)}
        except Exception as e:
            self._log("research_cycle_error", {"error": str(e)})
            return {"error": str(e)}

    def _daemon_loop(self):
        """Background daemon loop: poll, run eligible cycles, sleep, repeat.

        Uses ``Event.wait`` for the inter-poll sleep so ``stop_daemon`` wakes
        the thread immediately (the original busy-looped ``time.sleep(1)``
        sixty times, and its error path slept uninterruptibly).
        """
        self._log("daemon_started")

        while not self._stop_event.is_set():
            try:
                should_run, reason = self.should_run_research()

                if should_run:
                    capacity = self.get_available_capacity()
                    self.status.current_utilization = capacity["total_utilization"]
                    self.run_research_cycle()
                else:
                    self._log("research_skipped", {"reason": reason})

                # Sleep up to 60s; returns early as soon as stop is requested.
                self._stop_event.wait(60)

            except Exception as e:
                self._log("daemon_error", {"error": str(e)})
                self._stop_event.wait(60)  # back off before retrying

        self._log("daemon_stopped")

    def start_daemon(self):
        """Start the background daemon thread.

        Returns:
            True if started; False if a daemon is already running.
        """
        if self._daemon_thread and self._daemon_thread.is_alive():
            return False

        self._stop_event.clear()
        self._daemon_thread = threading.Thread(target=self._daemon_loop, daemon=True)
        self._daemon_thread.start()
        self.status.running = True
        self._save_status()
        return True

    def stop_daemon(self):
        """Signal the daemon to stop and wait (up to 5s) for it to exit."""
        self._stop_event.set()
        if self._daemon_thread:
            self._daemon_thread.join(timeout=5)
        self.status.running = False
        self._save_status()

    def register_callback(self, callback: Callable):
        """Register a callback invoked as callback(event, result) after each cycle."""
        self._callbacks.append(callback)

    def get_status(self) -> Dict[str, Any]:
        """Get a comprehensive status snapshot (config, capacity, daily stats)."""
        capacity = self.get_available_capacity()
        should_run, reason = self.should_run_research()

        return {
            "enabled": self.config.enabled,
            "running": self.status.running,
            "is_off_peak": self.is_off_peak(),
            "current_utilization": capacity["total_utilization"],
            "available_capacity": capacity["available_capacity"],
            "research_capacity": capacity["research_capacity"],
            "should_run_now": should_run,
            "status_reason": reason,
            "last_run": self.status.last_research_run,
            "next_scheduled": self.status.next_scheduled_run,
            "today_stats": {
                "runs": self.status.research_runs_today,
                "insights": self.status.insights_harvested_today,
                "tasks": self.status.tasks_generated_today
            }
        }

    def reset_daily_stats(self):
        """Reset daily statistics (external scheduler should call this at midnight)."""
        self.status.research_runs_today = 0
        self.status.insights_harvested_today = 0
        self.status.tasks_generated_today = 0
        self._save_status()
        self._log("daily_stats_reset")

def main():
    """Command-line entry point for the Research Scheduler."""
    import argparse

    parser = argparse.ArgumentParser(description="Genesis Research Scheduler")
    parser.add_argument("command", choices=["status", "run", "start", "stop", "capacity"])
    command = parser.parse_args().command

    scheduler = ResearchScheduler()
    rule = "=" * 50

    if command == "status":
        info = scheduler.get_status()
        print(f"\n{rule}")
        print("RESEARCH SCHEDULER STATUS")
        print(f"{rule}\n")

        print(f"Enabled: {info['enabled']}")
        print(f"Running: {info['running']}")
        print(f"Off-peak: {info['is_off_peak']}")
        print()

        print("Capacity:")
        print(f"  Current utilization: {info['current_utilization']:.1%}")
        print(f"  Available capacity: {info['available_capacity']:.1%}")
        print(f"  Research capacity: {info['research_capacity']:.1%}")
        print()

        print(f"Should run now: {info['should_run_now']} ({info['status_reason']})")
        print(f"Last run: {info['last_run'] or 'Never'}")
        print(f"Next scheduled: {info['next_scheduled'] or 'Not scheduled'}")
        print()

        today = info['today_stats']
        print("Today's stats:")
        print(f"  Research runs: {today['runs']}")
        print(f"  Insights harvested: {today['insights']}")
        print(f"  Tasks generated: {today['tasks']}")

    elif command == "run":
        print("Running research cycle...")
        outcome = scheduler.run_research_cycle()
        print(f"\nResult: {json.dumps(outcome, indent=2)}")

    elif command == "start":
        started = scheduler.start_daemon()
        print("Daemon started successfully" if started else "Daemon already running")

    elif command == "stop":
        scheduler.stop_daemon()
        print("Daemon stopped")

    elif command == "capacity":
        cap = scheduler.get_available_capacity()
        print(f"\n{rule}")
        print("AVAILABLE RESEARCH CAPACITY")
        print(f"{rule}\n")

        print(f"Total utilization: {cap['total_utilization']:.1%}")
        print(f"Available capacity: {cap['available_capacity']:.1%}")
        print(f"Research capacity: {cap['research_capacity']:.1%}")
        print(f"Off-peak hours: {cap['is_off_peak']}")
        print(f"Best model: {cap['best_model']}")
        print(f"Can research: {cap['can_research']}")


if __name__ == "__main__":
    main()
