"""
File Lock MCP Tools
====================
Redis-backed file lock registry preventing both agents from editing same files.
Falls back to file-based tracking when Redis is unavailable.

Tools:
- file_lock_claim: Lock files with 4-hour TTL
- file_lock_release: Release file locks
- file_lock_status: Show all active locks
"""

import json
import sys
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, Optional

logger = logging.getLogger(__name__)

# Constants
LOCK_TTL_HOURS = 4  # every claim auto-expires this many hours after claimed_at
REDIS_LOCK_KEY = "genesis:file_locks"  # Redis hash: file path -> lock-info JSON blob
SYNC_DIR = Path("/mnt/e/genesis-system/data/antigravity-sync")
LOCK_FILE = SYNC_DIR / "file_locks.json"  # JSON fallback store used when Redis is down
COORDINATION_FILE = Path("/mnt/e/genesis-system/.agent/coordination/UVS_PARALLEL_WORK.md")

# Lazy Redis client
_redis_client = None  # cached by _get_redis(); stays None until a connection + ping succeeds


def _get_redis():
    """Return the shared Redis client, connecting lazily on first use.

    Returns None when Redis (or its project config) is unavailable; the
    failure is logged and the next call will retry the connection.
    """
    global _redis_client
    if _redis_client is not None:
        return _redis_client
    try:
        # Project config module lives outside the normal import path.
        sys.path.insert(0, '/mnt/e/genesis-system/data/genesis-memory')
        from elestio_config import RedisConfig
        import redis

        client = redis.Redis(**RedisConfig.get_connection_params())
        client.ping()  # fail fast so callers can fall back to the file store
        _redis_client = client
    except Exception as e:
        logger.warning(f"Redis unavailable for file locks: {e}")
        _redis_client = None
    return _redis_client


def _read_file_locks() -> Dict[str, dict]:
    """Load the lock table from the JSON fallback file.

    Expired entries are filtered out on read; any I/O or parse error is
    logged and an empty table is returned.
    """
    try:
        if LOCK_FILE.exists():
            with open(LOCK_FILE, "r") as f:
                stored = json.load(f)
            cutoff = datetime.utcnow()
            # Keep only locks whose expiry is still in the future.
            return {
                path: info
                for path, info in stored.items()
                if datetime.fromisoformat(info["expires_at"]) > cutoff
            }
    except Exception as e:
        logger.error(f"Failed to read file locks: {e}")
    return {}


def _write_file_locks(locks: Dict[str, dict]):
    """Persist *locks* to the JSON fallback file; errors are logged, never raised."""
    try:
        LOCK_FILE.parent.mkdir(parents=True, exist_ok=True)
        LOCK_FILE.write_text(json.dumps(locks, indent=2))
    except Exception as e:
        logger.error(f"Failed to write file locks: {e}")


def _update_coordination_file(locks: Dict[str, dict]):
    """Rewrite the shared coordination markdown with the current lock table.

    Regenerates the whole UVS_PARALLEL_WORK.md file (CLAIMED FILES table plus
    the standing RULES section). Best-effort: failures are logged at debug.
    """
    try:
        COORDINATION_FILE.parent.mkdir(parents=True, exist_ok=True)

        parts = [
            "# Parallel Work Coordination\n\n",
            f"Last updated: {datetime.utcnow().isoformat()}\n\n",
            "## CLAIMED FILES\n\n",
        ]

        if locks:
            parts.append("| File | Agent | Claimed At | Expires At |\n")
            parts.append("|------|-------|------------|------------|\n")
            # Sorted for a stable, diff-friendly table.
            parts.extend(
                f"| `{path}` | {info['agent']} | "
                f"{info['claimed_at']} | {info['expires_at']} |\n"
                for path, info in sorted(locks.items())
            )
        else:
            parts.append("No files currently claimed.\n")

        parts += [
            "\n## RULES\n\n",
            "- Check this file before editing any listed files\n",
            "- Use `file_lock_claim` MCP tool to claim files\n",
            "- Locks auto-expire after 4 hours\n",
        ]

        COORDINATION_FILE.write_text("".join(parts))
    except Exception as e:
        logger.debug(f"Failed to update coordination file: {e}")


def file_lock_claim(files_json: str, agent: str) -> str:
    """
    Claim file locks to prevent concurrent editing.

    Conflicts are resolved against BOTH stores (Redis and the file fallback)
    before anything is written, so a conflicting path is never partially
    claimed in one store while reported as a conflict to the caller.

    Args:
        files_json: JSON array of file paths to lock (a bare string is
            treated as a single-element list)
        agent: Agent name claiming the lock (e.g., "claude", "antigravity")

    Returns:
        JSON with claim results per file: {"claimed": true} on success, or
        {"claimed": false, "conflict": <holder>} when another agent holds
        a live lock. Top level also carries "agent" and "ttl_hours".
    """
    try:
        files = json.loads(files_json)
        if not isinstance(files, list):
            files = [files]
    except json.JSONDecodeError as e:
        return json.dumps({"error": f"Invalid JSON: {e}"})

    now = datetime.utcnow()
    expires = now + timedelta(hours=LOCK_TTL_HOURS)
    results = {}
    redis_client = _get_redis()

    for filepath in files:
        filepath = str(filepath).strip()
        lock_info = {
            "agent": agent,
            "claimed_at": now.isoformat(),
            "expires_at": expires.isoformat()
        }

        conflict = None

        # Phase 1: detect conflicts in Redis (live lock held by another agent).
        if redis_client:
            try:
                existing = redis_client.hget(REDIS_LOCK_KEY, filepath)
                if existing:
                    existing_lock = json.loads(existing)
                    existing_expires = datetime.fromisoformat(existing_lock["expires_at"])
                    if existing_expires > now and existing_lock["agent"] != agent:
                        conflict = existing_lock["agent"]
            except Exception as e:
                logger.debug(f"Redis lock claim failed for {filepath}: {e}")

        # Phase 1b: detect conflicts in the file fallback.
        # _read_file_locks() already prunes expired entries, so any entry
        # held by a different agent is a live conflict.
        file_locks = _read_file_locks()
        if conflict is None:
            existing_lock = file_locks.get(filepath)
            if existing_lock is not None and existing_lock["agent"] != agent:
                conflict = existing_lock["agent"]

        if conflict:
            # BUGFIX: do not write the lock into either store on conflict;
            # previously the file fallback could still record this agent as
            # holder while the caller was told the claim failed.
            results[filepath] = {"claimed": False, "conflict": conflict}
            continue

        # Phase 2: no conflict — record the claim in both stores.
        if redis_client:
            try:
                redis_client.hset(REDIS_LOCK_KEY, filepath, json.dumps(lock_info))
            except Exception as e:
                logger.debug(f"Redis lock claim failed for {filepath}: {e}")
        file_locks[filepath] = lock_info
        _write_file_locks(file_locks)

        results[filepath] = {"claimed": True}

    # Mirror the final lock table into the human-readable coordination file.
    all_locks = _read_file_locks()
    _update_coordination_file(all_locks)

    return json.dumps({
        "results": results,
        "agent": agent,
        "ttl_hours": LOCK_TTL_HOURS
    })


def file_lock_release(files_json: str, agent: str) -> str:
    """
    Release file locks.

    Args:
        files_json: JSON array of file paths to unlock
        agent: Agent releasing the locks

    Returns:
        JSON with release results
    """
    try:
        files = json.loads(files_json)
        if not isinstance(files, list):
            files = [files]
    except json.JSONDecodeError as e:
        return json.dumps({"error": f"Invalid JSON: {e}"})

    results = {}
    redis_client = _get_redis()

    for raw_path in files:
        path = str(raw_path).strip()
        released = False

        # Redis side: only the holding agent may release its own lock.
        if redis_client:
            try:
                stored = redis_client.hget(REDIS_LOCK_KEY, path)
                if stored:
                    holder = json.loads(stored)["agent"]
                    if holder != agent:
                        results[path] = {
                            "released": False,
                            "error": f"Locked by {holder}, not {agent}"
                        }
                        continue
                    redis_client.hdel(REDIS_LOCK_KEY, path)
                    released = True
            except Exception as e:
                logger.debug(f"Redis lock release failed for {path}: {e}")

        # File fallback side.
        file_locks = _read_file_locks()
        entry = file_locks.get(path)
        if entry is not None:
            if entry["agent"] == agent:
                del file_locks[path]
                released = True
            elif not released:
                # Not released anywhere and the fallback holder differs.
                results[path] = {
                    "released": False,
                    "error": f"Locked by {entry['agent']}, not {agent}"
                }
                continue
        _write_file_locks(file_locks)

        results[path] = {"released": released}

    # Update coordination file
    all_locks = _read_file_locks()
    _update_coordination_file(all_locks)

    return json.dumps({"results": results, "agent": agent})


def file_lock_status() -> str:
    """
    Show all active file locks.

    Returns:
        JSON with all active locks from Redis + file
    """
    now = datetime.utcnow()
    active: Dict[str, dict] = {}

    # Redis is authoritative when reachable; expired entries are purged here.
    client = _get_redis()
    if client:
        try:
            for key, value in client.hgetall(REDIS_LOCK_KEY).items():
                path = key.decode("utf-8") if isinstance(key, bytes) else key
                payload = value.decode("utf-8") if isinstance(value, bytes) else value
                info = json.loads(payload)
                if datetime.fromisoformat(info["expires_at"]) > now:
                    info["source"] = "redis"
                    active[path] = info
                else:
                    # Clean up expired
                    client.hdel(REDIS_LOCK_KEY, path)
        except Exception as e:
            logger.debug(f"Redis lock status failed: {e}")

    # Fallback entries fill in only paths Redis did not report.
    for path, info in _read_file_locks().items():
        if path not in active:
            info["source"] = "file"
            active[path] = info

    return json.dumps({
        "locks": active,
        "count": len(active),
        "redis_available": client is not None
    })


def check_file_locked(filepath: str) -> Optional[str]:
    """
    Check if a specific file is locked (used by pre_tool_check hook).

    Args:
        filepath: Path to check

    Returns:
        Agent name that holds the lock, or None if unlocked
    """
    now = datetime.utcnow()

    # Redis first: a live (non-expired) entry wins immediately.
    client = _get_redis()
    if client:
        try:
            raw = client.hget(REDIS_LOCK_KEY, filepath)
            if raw:
                payload = raw.decode("utf-8") if isinstance(raw, bytes) else raw
                lock = json.loads(payload)
                if datetime.fromisoformat(lock["expires_at"]) > now:
                    return lock["agent"]
        except Exception:
            pass

    # File fallback: _read_file_locks() already drops expired entries.
    entry = _read_file_locks().get(filepath)
    if entry is not None:
        return entry["agent"]

    return None
