"""
ATLAS — Research & Intelligence General
========================================
Fires N parallel research scout sub-agents via `claude -p` subprocess calls.

Usage:
    from core.generals.atlas import spawn_atlas
    results = spawn_atlas("Top 20 Australian digital agencies", scout_count=5)
"""

import json
import subprocess
import uuid
from datetime import datetime, timezone, timedelta
from pathlib import Path

# Absolute repo root. NOTE(review): hard-coded WSL-style mount path — confirm
# it matches the deployment environment before shipping.
REPO_ROOT = Path("/mnt/e/genesis-system")
# Machine-readable swarm state JSON files (one per swarm) are written here.
SWARM_PROGRESS_DIR = REPO_ROOT / "data" / "swarm_progress"
# Per-swarm scout prompt/report files are written under this tree.
HIVE_PROGRESS_DIR = REPO_ROOT / "hive" / "progress"

# Per-scout prompt template, filled via str.format() in spawn_atlas().
# Placeholders: {mission}, {task_index}, {total_tasks}, {sub_task}.
# Keep the literal text stable — it is the runtime contract with the scouts.
ATLAS_SYSTEM_PROMPT = """You are ATLAS, the Research & Intelligence General for Genesis.

Your mission: {mission}

Sub-task {task_index} of {total_tasks}: {sub_task}

Instructions:
1. Research this topic thoroughly using WebSearch and WebFetch tools
2. Extract key facts, numbers, names, URLs
3. Identify competitive intelligence relevant to Genesis (ReceptionistAI / AI voice widgets)
4. Output a structured research report with: Summary, Key Facts, Opportunities, Risks
5. Write KG entity if you discover significant intelligence (append to KNOWLEDGE_GRAPH/entities/)

Output format:
## Research Report: {sub_task}
### Summary
### Key Facts
### Competitive Intelligence
### Opportunities for Genesis
### Sources
"""


def spawn_atlas(mission: str, scout_count: int = 5) -> dict:
    """
    Spawn N parallel ATLAS research scouts via subprocess.

    Parameters
    ----------
    mission : str
        High-level research mission (e.g. "Top 20 AU digital agencies Brisbane")
    scout_count : int
        Number of parallel research scouts to fire (default 5). The effective
        count may be lower: the decomposer has a finite set of research
        angles, so the swarm runs len(sub_tasks) scouts.

    Returns
    -------
    dict with keys: swarm_id, mission, agent_count, status, output_dir
        (plus sub_tasks, timestamps, and process bookkeeping fields)
    """
    SWARM_PROGRESS_DIR.mkdir(parents=True, exist_ok=True)
    HIVE_PROGRESS_DIR.mkdir(parents=True, exist_ok=True)

    swarm_id = f"atlas_{uuid.uuid4().hex[:8]}"
    output_dir = HIVE_PROGRESS_DIR / swarm_id
    output_dir.mkdir(parents=True, exist_ok=True)

    # Decompose mission into sub-tasks. _decompose_mission caps the result at
    # its fixed list of research angles, so it can return fewer entries than
    # scout_count — use len(sub_tasks) everywhere below, otherwise prompts
    # ("Sub-task i of N"), agent_count, and progress output over-report.
    sub_tasks = _decompose_mission(mission, scout_count)
    total = len(sub_tasks)

    swarm_state = {
        "swarm_id": swarm_id,
        "general": "ATLAS",
        "mission": mission,
        "model": "google/gemini-2.5-flash",
        "agent_count": total,
        "sub_tasks": sub_tasks,
        "status": "running",
        "stories_completed": 0,
        "stories_completed_this_hour": 0,
        "started_at": datetime.now(timezone.utc).isoformat(),
        "output_dir": str(output_dir),
        "processes": [],
    }

    # Write initial state so monitors can see the swarm before scouts start.
    state_file = SWARM_PROGRESS_DIR / f"{swarm_id}.json"
    _save_state(state_file, swarm_state)

    # Fire scouts in parallel (background)
    processes = []
    for i, sub_task in enumerate(sub_tasks, start=1):
        prompt = ATLAS_SYSTEM_PROMPT.format(
            mission=mission,
            task_index=i,
            total_tasks=total,
            sub_task=sub_task,
        )
        # Intended report destination for the live call below.
        output_file = output_dir / f"scout_{i:02d}.md"

        # NOTE: Does NOT fire actual API calls — framework only.
        # Replace with live call:
        # proc = subprocess.Popen(
        #     ["claude", "-p", prompt, "--output", str(output_file)],
        #     stdout=subprocess.DEVNULL,
        #     stderr=subprocess.DEVNULL,
        # )
        # processes.append(proc.pid)

        # Write prompt template for each scout
        (output_dir / f"scout_{i:02d}_prompt.md").write_text(prompt)
        print(f"[ATLAS] Scout {i}/{total} queued: {sub_task[:60]}")

    # PIDs once live calls are wired in; stripped from JSON by _save_state.
    swarm_state["processes"] = processes
    _save_state(state_file, swarm_state)

    print(f"[ATLAS] Swarm {swarm_id} launched — {total} scouts for: {mission[:60]}")
    return swarm_state


def spawn(mission: str, scout_count: int = 5) -> dict:
    """Uniform router entry point: delegate to :func:`spawn_atlas`.

    Exists so genesis_hive.py can invoke every general through the same
    ``spawn(...)`` name.
    """
    return spawn_atlas(mission=mission, scout_count=scout_count)


def _decompose_mission(mission: str, count: int) -> list[str]:
    """
    Break a mission into sub-tasks.
    In production this would use an LLM decomposer.
    For now: generate numbered research angles.
    """
    angles = [
        f"Overview and market landscape: {mission}",
        f"Top players and competitive positioning: {mission}",
        f"Pricing models and packages: {mission}",
        f"Technology stack and capabilities: {mission}",
        f"Contact details and outreach targets: {mission}",
        f"Recent news and developments: {mission}",
        f"Weaknesses and gaps we can exploit: {mission}",
        f"Customer reviews and pain points: {mission}",
        f"Partnership and integration opportunities: {mission}",
        f"Regulatory and compliance considerations: {mission}",
    ]
    return angles[:count]


def _save_state(path: Path, state: dict) -> None:
    state_copy = {k: v for k, v in state.items() if k != "processes"}
    path.write_text(json.dumps(state_copy, indent=2, default=str))
