"""
Antigravity Tools — Genesis V2 MCP
====================================
Cross-agent sync tools: Claude <-> Antigravity (Gemini) coordination.
File-locked state, JSONL event log, workflow dispatch, memory bridge.

Tools:
- antigravity_plan: Create workflow plan via Gemini
- antigravity_execute: Execute a plan
- gemini_query: Direct Gemini query
- sync_push / sync_pull / sync_status: Shared state sync
- memory_query / memory_store: Memory bridge
- antigravity_browser: Delegate browser tasks to the Antigravity Master Browser Engine
"""

import hashlib
import json
import logging
import os
import sys
import portalocker
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional

from genesis_v2.mcp.validation import check_rate_limit

logger = logging.getLogger(__name__)

# Sync directory layout
SYNC_DIR = Path("/mnt/e/genesis-system/data/antigravity-sync")
STATE_FILE = SYNC_DIR / "state.json"
EVENTS_FILE = SYNC_DIR / "events.jsonl"
PLANS_DIR = SYNC_DIR / "plans"
EXECUTIONS_DIR = SYNC_DIR / "executions"
CONTEXT_DIR = SYNC_DIR / "context"

# Lazy-loaded singletons
_workflow = None
_memory_bridge = None
_gemini_client = None


# =============================================================================
# Internal helpers
# =============================================================================

def _ensure_sync_dirs():
    """Create sync directory structure on first use."""
    for d in (SYNC_DIR, PLANS_DIR, EXECUTIONS_DIR, CONTEXT_DIR):
        d.mkdir(parents=True, exist_ok=True)

    if not EVENTS_FILE.exists():
        EVENTS_FILE.touch()

    if not STATE_FILE.exists():
        STATE_FILE.write_text(json.dumps({
            "created_at": datetime.now(timezone.utc).isoformat(),
            "sync_count": 0,
            "active_plan": None,
        }))


def _read_state() -> dict:
    """Read current shared state with advisory file locking."""
    _ensure_sync_dirs()
    try:
        with open(STATE_FILE, "r") as f:
            portalocker.lock(f, portalocker.LOCK_SH)
            try:
                data = json.load(f)
            finally:
                portalocker.unlock(f)
        return data
    except (json.JSONDecodeError, FileNotFoundError):
        return {
            "created_at": datetime.now(timezone.utc).isoformat(),
            "sync_count": 0,
            "active_plan": None,
        }


def _write_state(state: dict):
    """Write state under an exclusive advisory lock.

    Note: the write itself is serialized against other locked accessors, but
    a _read_state()/_write_state() pair is not atomic as a whole; concurrent
    read-modify-write cycles are last-writer-wins.
    """
    _ensure_sync_dirs()
    state["last_updated"] = datetime.now(timezone.utc).isoformat()
    # Open in "r+" and truncate under the lock: opening with "w" would
    # truncate before the lock is held, letting a reader see a partial file.
    with open(STATE_FILE, "r+") as f:
        portalocker.lock(f, portalocker.LOCK_EX)
        try:
            f.seek(0)
            f.truncate()
            json.dump(state, f, indent=2, default=str)
        finally:
            portalocker.unlock(f)

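# Illustrative state.json contents after a few syncs (keys come from
# _ensure_sync_dirs, _write_state, sync_push, and antigravity_plan below;
# the values here are placeholders):
#
#   {
#     "created_at": "2024-01-01T00:00:00+00:00",
#     "sync_count": 3,
#     "active_plan": "a1b2c3d4e5f6",
#     "claude_context": {"task": "review PR"},
#     "last_updated": "2024-01-01T00:05:00+00:00"
#   }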

def _append_event(source: str, event_type: str, data: dict):
    """Append event to JSONL log (safe for concurrent writes)."""
    _ensure_sync_dirs()
    event = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "source": source,
        "type": event_type,
        "data": data,
    }
    with open(EVENTS_FILE, "a") as f:
        portalocker.lock(f, portalocker.LOCK_EX)
        try:
            f.write(json.dumps(event) + "\n")
        finally:
            portalocker.unlock(f)

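# Each appended line in events.jsonl is a self-contained JSON object, e.g.
# (shape taken from _append_event above, values illustrative):
#
#   {"timestamp": "2024-01-01T00:00:00+00:00", "source": "claude_code",
#    "type": "sync_push", "data": {"keys": ["task"]}}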

_GENESIS_ROOT = '/mnt/e/genesis-system'


def _ensure_genesis_path():
    """Make the Genesis repo importable without duplicating sys.path entries."""
    if _GENESIS_ROOT not in sys.path:
        sys.path.insert(0, _GENESIS_ROOT)


def _get_workflow():
    """Lazy import AntigravityWorkflow."""
    global _workflow
    if _workflow is None:
        try:
            _ensure_genesis_path()
            from core.antigravity_workflow import AntigravityWorkflow
            _workflow = AntigravityWorkflow()
        except ImportError as e:
            logger.warning(f'AntigravityWorkflow not available: {e}')
    return _workflow


def _get_memory_bridge():
    """Lazy import MemoryBridge."""
    global _memory_bridge
    if _memory_bridge is None:
        try:
            _ensure_genesis_path()
            from core.memory_bridge import MemoryBridge
            _memory_bridge = MemoryBridge()
        except ImportError as e:
            logger.warning(f'MemoryBridge not available: {e}')
    return _memory_bridge


def _get_gemini_client():
    """Lazy import GeminiClient."""
    global _gemini_client
    if _gemini_client is None:
        try:
            _ensure_genesis_path()
            from core.gemini_executor import GeminiExecutor as GeminiClient
            _gemini_client = GeminiClient()
        except ImportError as e:
            logger.warning(f'GeminiClient not available: {e}')
    return _gemini_client


def _check_gemini_api_key() -> Optional[str]:
    """Check whether a Gemini API key is available. Returns error string or None."""
    key = os.environ.get('GEMINI_API_KEY') or os.environ.get('GOOGLE_API_KEY')
    if not key:
        return "Neither GEMINI_API_KEY nor GOOGLE_API_KEY is set. Configure one in config/secrets.env."
    return None


# =============================================================================
# Public MCP tools
# =============================================================================

def antigravity_plan(goal: str, context_json: str = "{}") -> str:
    """
    Create a workflow plan via Gemini Antigravity system.

    Args:
        goal: The goal to plan for
        context_json: Optional JSON context for planning

    Returns:
        JSON string with plan details or error
    """
    allowed, error = check_rate_limit('antigravity_plan')
    if not allowed:
        return json.dumps({"error": error})

    if not goal or not isinstance(goal, str):
        return json.dumps({"error": "goal must be a non-empty string"})

    api_error = _check_gemini_api_key()
    if api_error:
        return json.dumps({"error": api_error})

    workflow = _get_workflow()
    if workflow is None:
        return json.dumps({"error": "AntigravityWorkflow not available. Check imports."})

    try:
        context = json.loads(context_json)
    except json.JSONDecodeError:
        return json.dumps({"error": "Invalid context_json"})

    try:
        plan = workflow.create_plan(goal, context)

        # Deterministic fallback id: str(hash(goal)) varies across processes
        # (PYTHONHASHSEED) and can be negative.
        plan_id = getattr(plan, 'plan_id', None) or hashlib.sha1(goal.encode('utf-8')).hexdigest()[:12]
        steps = plan.steps if hasattr(plan, 'steps') else vars(plan) if hasattr(plan, '__dict__') else str(plan)
        if not isinstance(steps, (list, dict)):
            steps = str(steps)

        plan_data = {
            "plan_id": plan_id,
            "goal": goal,
            "steps": steps,
            "created_at": datetime.now(timezone.utc).isoformat(),
            "status": "planned",
        }

        # Persist to disk
        _ensure_sync_dirs()
        plan_file = PLANS_DIR / f"{plan_id}.json"
        plan_file.write_text(json.dumps(plan_data, indent=2, default=str))

        # Update shared state
        state = _read_state()
        state["active_plan"] = plan_id
        _write_state(state)

        _append_event("claude_code", "plan_created", {"plan_id": plan_id, "goal": goal[:200]})

        return json.dumps(plan_data, indent=2, default=str)

    except Exception as e:
        logger.error(f'antigravity_plan error: {e}')
        return json.dumps({"error": str(e)})

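# Successful antigravity_plan calls return the persisted plan record, e.g.
# (the "steps" shape depends on what AntigravityWorkflow.create_plan returns;
# the values here are placeholders):
#
#   {"plan_id": "a1b2c3d4e5f6", "goal": "...", "steps": [...],
#    "created_at": "2024-01-01T00:00:00+00:00", "status": "planned"}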

def antigravity_execute(plan_id: str) -> str:
    """
    Execute a previously created workflow plan.

    Args:
        plan_id: ID of the plan to execute

    Returns:
        JSON string with execution results or error
    """
    allowed, error = check_rate_limit('antigravity_execute')
    if not allowed:
        return json.dumps({"error": error})

    if not plan_id or not isinstance(plan_id, str):
        return json.dumps({"error": "plan_id must be a non-empty string"})

    api_error = _check_gemini_api_key()
    if api_error:
        return json.dumps({"error": api_error})

    workflow = _get_workflow()
    if workflow is None:
        return json.dumps({"error": "AntigravityWorkflow not available. Check imports."})

    _ensure_sync_dirs()
    plan_file = PLANS_DIR / f"{plan_id}.json"
    if not plan_file.exists():
        return json.dumps({"error": f"Plan not found: {plan_id}"})

    try:
        plan_data = json.loads(plan_file.read_text())
        result = workflow.execute_plan(plan_data)

        exec_ts = datetime.now(timezone.utc)
        result_value = result if isinstance(result, (dict, list)) else str(result)

        exec_data = {
            "plan_id": plan_id,
            "result": result_value,
            "executed_at": exec_ts.isoformat(),
            "status": "completed",
        }

        exec_file = EXECUTIONS_DIR / f"{plan_id}_{exec_ts.strftime('%Y%m%d_%H%M%S')}.json"
        exec_file.write_text(json.dumps(exec_data, indent=2, default=str))

        _append_event("claude_code", "plan_executed", {"plan_id": plan_id})

        return json.dumps(exec_data, indent=2, default=str)

    except Exception as e:
        logger.error(f'antigravity_execute error: {e}')
        return json.dumps({"error": str(e)})

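# Each execution is archived under executions/ as
# <plan_id>_<YYYYMMDD_HHMMSS>.json with the same shape as the returned
# exec_data: {"plan_id": ..., "result": ..., "executed_at": ...,
# "status": "completed"}.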

def gemini_query(prompt: str, task_type: str = "general", temperature: float = 0.7) -> str:
    """
    Direct Gemini execution for analysis, planning, or generation.

    Args:
        prompt: The prompt to send to Gemini
        task_type: Type of task (general, analysis, planning, code_review)
        temperature: Sampling temperature (0.0-1.0)

    Returns:
        JSON string with Gemini response or error
    """
    allowed, error = check_rate_limit('gemini_query')
    if not allowed:
        return json.dumps({"error": error})

    if not prompt or not isinstance(prompt, str):
        return json.dumps({"error": "prompt must be a non-empty string"})

    if not isinstance(temperature, (int, float)) or not 0.0 <= temperature <= 1.0:
        return json.dumps({"error": "temperature must be a number between 0.0 and 1.0"})

    api_error = _check_gemini_api_key()
    if api_error:
        return json.dumps({"error": api_error})

    client = _get_gemini_client()
    if client is None:
        return json.dumps({"error": "GeminiClient not available. Check imports."})

    try:
        response = client.query(prompt, task_type=task_type, temperature=temperature)
        result = response if isinstance(response, (dict, list)) else {"response": str(response)}
        _append_event("claude_code", "gemini_query", {"task_type": task_type, "prompt_len": len(prompt)})
        return json.dumps(result, indent=2, default=str)
    except Exception as e:
        logger.error(f'gemini_query error: {e}')
        return json.dumps({"error": str(e)})

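# Illustrative call (the result shape depends on GeminiExecutor.query; a bare
# string response is wrapped as {"response": "..."} above):
#
#   gemini_query("Summarize recent sync events", task_type="analysis",
#                temperature=0.2)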

def sync_push(context_json: str) -> str:
    """
    Push context to shared state for cross-agent sync.

    Args:
        context_json: JSON string with context data to push

    Returns:
        JSON string with sync result
    """
    allowed, error = check_rate_limit('sync_push')
    if not allowed:
        return json.dumps({"error": error})

    if not context_json or not isinstance(context_json, str):
        return json.dumps({"error": "context_json must be a non-empty string"})

    try:
        context = json.loads(context_json)
    except json.JSONDecodeError as e:
        return json.dumps({"error": f"Invalid JSON: {e}"})

    try:
        state = _read_state()
        state.setdefault("claude_context", {})
        state["claude_context"].update(context)
        state["sync_count"] = state.get("sync_count", 0) + 1
        _write_state(state)
        _append_event("claude_code", "sync_push", {"keys": list(context.keys())})
        return json.dumps({"success": True, "sync_count": state["sync_count"]})
    except Exception as e:
        logger.error(f'sync_push error: {e}')
        return json.dumps({"error": str(e)})


def sync_pull() -> str:
    """
    Pull latest shared state for cross-agent sync.

    Returns:
        JSON string with current shared state
    """
    allowed, error = check_rate_limit('sync_pull')
    if not allowed:
        return json.dumps({"error": error})

    try:
        state = _read_state()

        # Include recent events (last 20)
        recent_events = []
        if EVENTS_FILE.exists():
            lines = EVENTS_FILE.read_text().strip().splitlines()
            for line in lines[-20:]:
                try:
                    recent_events.append(json.loads(line))
                except json.JSONDecodeError:
                    continue

        return json.dumps({
            "state": state,
            "recent_events": recent_events,
        }, indent=2, default=str)
    except Exception as e:
        logger.error(f'sync_pull error: {e}')
        return json.dumps({"error": str(e)})

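# Round-trip sketch: context pushed by one agent is visible to the other on
# the next pull (values are placeholders):
#
#   sync_push('{"task": "review PR"}')  -> {"success": true, "sync_count": 4}
#   sync_pull()  -> {"state": {..., "claude_context": {"task": "review PR"}},
#                    "recent_events": [...]}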

def sync_status() -> str:
    """
    Get sync health and component status.

    Returns:
        JSON string with sync health information
    """
    allowed, error = check_rate_limit('sync_status')
    if not allowed:
        return json.dumps({"error": error})

    try:
        _ensure_sync_dirs()
        state = _read_state()

        event_count = 0
        if EVENTS_FILE.exists():
            event_count = sum(
                1 for line in EVENTS_FILE.read_text().splitlines() if line.strip()
            )

        plan_count = len(list(PLANS_DIR.glob("*.json"))) if PLANS_DIR.exists() else 0
        exec_count = len(list(EXECUTIONS_DIR.glob("*.json"))) if EXECUTIONS_DIR.exists() else 0

        return json.dumps({
            "sync_count": state.get("sync_count", 0),
            "active_plan": state.get("active_plan"),
            "event_count": event_count,
            "plan_count": plan_count,
            "execution_count": exec_count,
            "last_updated": state.get("last_updated"),
            "sync_dir": str(SYNC_DIR),
        }, indent=2)
    except Exception as e:
        logger.error(f'sync_status error: {e}')
        return json.dumps({"error": str(e)})

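# Example sync_status payload (every field is computed above; the values are
# illustrative):
#
#   {"sync_count": 4, "active_plan": "a1b2c3d4e5f6", "event_count": 12,
#    "plan_count": 3, "execution_count": 2,
#    "last_updated": "2024-01-01T00:05:00+00:00",
#    "sync_dir": "/mnt/e/genesis-system/data/antigravity-sync"}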

def memory_query(query: str) -> str:
    """
    Query memory bridge with Gemini context for enriched responses.

    Args:
        query: The query to search memory for

    Returns:
        JSON string with memory-enriched response or error
    """
    allowed, error = check_rate_limit('memory_query')
    if not allowed:
        return json.dumps({"error": error})

    if not query or not isinstance(query, str):
        return json.dumps({"error": "query must be a non-empty string"})

    bridge = _get_memory_bridge()
    if bridge is None:
        return json.dumps({"error": "MemoryBridge not available. Check imports."})

    try:
        result = bridge.query(query)
        result_value = result if isinstance(result, (dict, list)) else {"response": str(result)}
        return json.dumps(result_value, indent=2, default=str)
    except Exception as e:
        logger.error(f'memory_query error: {e}')
        return json.dumps({"error": str(e)})


def memory_store(insight_json: str) -> str:
    """
    Store an insight in the memory bridge for persistent context.

    Args:
        insight_json: JSON string with insight data ('content' required; 'category' recommended)

    Returns:
        JSON string with store result or error
    """
    allowed, error = check_rate_limit('memory_store')
    if not allowed:
        return json.dumps({"error": error})

    if not insight_json or not isinstance(insight_json, str):
        return json.dumps({"error": "insight_json must be a non-empty string"})

    try:
        insight = json.loads(insight_json)
    except json.JSONDecodeError as e:
        return json.dumps({"error": f"Invalid JSON: {e}"})

    if "content" not in insight:
        return json.dumps({"error": "insight_json must have 'content' field"})

    bridge = _get_memory_bridge()
    if bridge is None:
        return json.dumps({"error": "MemoryBridge not available. Check imports."})

    try:
        result = bridge.store(insight)
        result_value = result if isinstance(result, (dict, list)) else {"success": True, "result": str(result)}
        return json.dumps(result_value, indent=2, default=str)
    except Exception as e:
        logger.error(f'memory_store error: {e}')
        return json.dumps({"error": str(e)})

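# Illustrative memory_store input: 'content' is required by the check above,
# 'category' is recommended per the docstring, and any other keys pass
# through to MemoryBridge.store unchanged:
#
#   memory_store('{"content": "Prefer JSONL for event logs", "category": "design"}')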

def antigravity_browser(task: str) -> str:
    """
    Execute a browser task via the Antigravity Master Browser Engine (High Stealth).

    Args:
        task: Description of the browser task to perform.

    Returns:
        JSON string with task status and delegation ID.
    """
    allowed, error = check_rate_limit('antigravity_browser')
    if not allowed:
        return json.dumps({"error": error})

    if not task or not isinstance(task, str):
        return json.dumps({"error": "task must be a non-empty string"})

    try:
        # Create a delegation record for the Conductor to fulfill.
        _ensure_sync_dirs()
        # Microsecond-resolution id: a plain second-granularity timestamp
        # would collide for two requests arriving within the same second.
        request_id = f"brw_{int(datetime.now(timezone.utc).timestamp() * 1_000_000)}"
        req_dir = SYNC_DIR / "browser_requests"
        req_dir.mkdir(parents=True, exist_ok=True)

        req_file = req_dir / f"{request_id}.json"
        req_data = {
            "id": request_id,
            "task": task,
            "timestamp": datetime.now(timezone.utc).isoformat(),
            "status": "pending",
            "source": "genesis_agent",
        }
        req_file.write_text(json.dumps(req_data, indent=2))

        _append_event("genesis_agent", "browser_delegated", {"request_id": request_id, "task": task[:100]})

        return json.dumps({
            "status": "delegated",
            "request_id": request_id,
            "message": "Task sent to Antigravity Master Browser. Check status in 30 seconds."
        })
    except Exception as e:
        logger.error(f'antigravity_browser error: {e}')
        return json.dumps({"error": str(e)})

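
if __name__ == "__main__":
    # Minimal local smoke test for the file-backed sync tools. This is a
    # sketch that assumes SYNC_DIR is writable on this machine; it does not
    # exercise the workflow, memory, or Gemini tools, which need the real
    # Genesis components and an API key.
    logging.basicConfig(level=logging.INFO)
    print(sync_push(json.dumps({"note": "smoke test"})))
    print(sync_pull())
    print(sync_status())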