"""
Genesis Structured Logging
==========================
JSON-formatted structured logging for Genesis memory system.

Features:
- JSON log format for machine parsing
- Correlation IDs for request tracing
- Context injection (component, operation)
- Log rotation and retention
- Multiple output handlers
- Performance metrics in logs

Usage:
    from logging_config import get_logger, with_context

    logger = get_logger("genesis.memory")
    logger.info("Memory stored", extra={"memory_id": "abc123", "tier": "semantic"})

    # With correlation context
    with with_context(correlation_id="req-456"):
        logger.info("Processing request")
"""

import json
import logging
import logging.handlers
import sys
import threading
import time
import uuid
from contextvars import ContextVar
from datetime import datetime, timezone
from functools import wraps
from pathlib import Path
from typing import Any, Dict, Optional

# Context variables for correlation.  ContextVar storage is per-thread and
# per-asyncio-task, so context set while handling one request never leaks
# into another concurrent request.
_correlation_id: ContextVar[Optional[str]] = ContextVar('correlation_id', default=None)
_component: ContextVar[Optional[str]] = ContextVar('component', default=None)
_operation: ContextVar[Optional[str]] = ContextVar('operation', default=None)

# Default log directory (Windows path; override via setup_logging(log_dir=...))
DEFAULT_LOG_DIR = Path("E:/genesis-system/logs")


class JSONFormatter(logging.Formatter):
    """
    Custom JSON formatter for structured logging.

    Output format:
    {
        "timestamp": "2026-01-05T08:00:00.123456Z",
        "level": "INFO",
        "logger": "genesis.memory",
        "message": "Memory stored",
        "correlation_id": "req-123",
        "component": "WorkingMemoryCache",
        "operation": "store",
        "extra": {...}
    }
    """

    # LogRecord attributes that belong to the standard record rather than
    # caller-supplied extra={...} fields.  Built once at class level so
    # format() does not rebuild the set on every log call.
    _STANDARD_ATTRS = frozenset({
        'name', 'msg', 'args', 'created', 'filename', 'funcName',
        'levelname', 'levelno', 'lineno', 'module', 'msecs',
        'pathname', 'process', 'processName', 'relativeCreated',
        'stack_info', 'exc_info', 'exc_text', 'thread', 'threadName',
        'message', 'taskName'
    })

    def __init__(self, include_stack: bool = True):
        """
        Args:
            include_stack: Include the formatted traceback in the
                "exception" object when the record carries exc_info.
        """
        super().__init__()
        self.include_stack = include_stack

    def format(self, record: logging.LogRecord) -> str:
        """Render *record* as a single-line JSON document."""
        # Use the record's own creation time (record.created), not the time
        # format() happens to run, and an aware UTC datetime --
        # datetime.utcnow() is deprecated since Python 3.12.
        created = datetime.fromtimestamp(record.created, tz=timezone.utc)
        log_data: Dict[str, Any] = {
            "timestamp": created.isoformat().replace("+00:00", "Z"),
            "level": record.levelname,
            "logger": record.name,
            "message": record.getMessage(),
        }

        # Add correlation context (read each ContextVar once)
        correlation_id = _correlation_id.get()
        if correlation_id:
            log_data["correlation_id"] = correlation_id
        component = _component.get()
        if component:
            log_data["component"] = component
        operation = _operation.get()
        if operation:
            log_data["operation"] = operation

        # Add source location
        log_data["source"] = {
            "file": record.filename,
            "line": record.lineno,
            "function": record.funcName
        }

        # Add extra fields (anything callers passed via extra={...})
        extra = {}
        for key, value in record.__dict__.items():
            if key not in self._STANDARD_ATTRS:
                try:
                    # Ensure value is JSON serializable; fall back to str()
                    json.dumps(value)
                    extra[key] = value
                except (TypeError, ValueError):
                    extra[key] = str(value)

        if extra:
            log_data["extra"] = extra

        # Add exception info if present
        if record.exc_info and self.include_stack:
            log_data["exception"] = {
                "type": record.exc_info[0].__name__ if record.exc_info[0] else None,
                "message": str(record.exc_info[1]) if record.exc_info[1] else None,
                "traceback": self.formatException(record.exc_info)
            }

        return json.dumps(log_data, default=str)


class ConsoleFormatter(logging.Formatter):
    """
    Human-readable console formatter with ANSI colors.

    Output shape: "[HH:MM:SS] LEVL [corr-id] <Component> message"
    """

    # ANSI color escape per level name; unknown levels get no color.
    COLORS = {
        'DEBUG': '\033[36m',     # Cyan
        'INFO': '\033[32m',      # Green
        'WARNING': '\033[33m',   # Yellow
        'ERROR': '\033[31m',     # Red
        'CRITICAL': '\033[35m',  # Magenta
    }
    RESET = '\033[0m'

    def format(self, record: logging.LogRecord) -> str:
        """Render *record* as a colored one-line string."""
        color = self.COLORS.get(record.levelname, '')
        reset = self.RESET if color else ''

        # Use the record's own creation time, not the time format() runs
        # (the two can differ when handlers are queued or slow).
        timestamp = datetime.fromtimestamp(record.created).strftime("%H:%M:%S")
        level = record.levelname[:4]

        # Include correlation ID if present (first 8 chars keeps lines short)
        correlation = ""
        correlation_id = _correlation_id.get()
        if correlation_id:
            correlation = f" [{correlation_id[:8]}]"

        # Include component if present
        component_name = _component.get()
        component = f" <{component_name}>" if component_name else ""

        return f"{color}[{timestamp}] {level}{reset}{correlation}{component} {record.getMessage()}"


class ContextManager:
    """
    Context manager that scopes logging context (correlation id,
    component, operation) to a ``with`` block.

    Uses ContextVar tokens for restoration: ``ContextVar.reset(token)``
    is the documented mechanism and restores the exact prior state even
    under nested or overlapping scopes.
    """

    def __init__(
        self,
        correlation_id: Optional[str] = None,
        component: Optional[str] = None,
        operation: Optional[str] = None,
        auto_generate_id: bool = True
    ):
        """
        Args:
            correlation_id: Explicit correlation id for this scope.
            component: Component name to attach to log records.
            operation: Operation name to attach to log records.
            auto_generate_id: When no correlation_id is given and none is
                already active, generate a short random one.
        """
        self.correlation_id = correlation_id
        self.component = component
        self.operation = operation
        self.auto_generate_id = auto_generate_id

        # (ContextVar, Token) pairs to undo on exit, in the order set.
        self._tokens = []

    def __enter__(self):
        if self.correlation_id:
            self._tokens.append(
                (_correlation_id, _correlation_id.set(self.correlation_id)))
        elif self.auto_generate_id and not _correlation_id.get():
            # First 8 hex chars of a UUID4 are plenty for log tracing.
            self._tokens.append(
                (_correlation_id, _correlation_id.set(str(uuid.uuid4())[:8])))

        if self.component:
            self._tokens.append((_component, _component.set(self.component)))
        if self.operation:
            self._tokens.append((_operation, _operation.set(self.operation)))

        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Undo in reverse order; each token restores the exact prior value.
        while self._tokens:
            var, token = self._tokens.pop()
            var.reset(token)
        return False  # never suppress exceptions


def with_context(
    correlation_id: Optional[str] = None,
    component: Optional[str] = None,
    operation: Optional[str] = None
) -> ContextManager:
    """Build a logging-context scope for use in a ``with`` statement."""
    return ContextManager(
        correlation_id=correlation_id,
        component=component,
        operation=operation,
    )


def log_operation(component: str, operation: str):
    """Decorator that runs the wrapped function inside a logging context.

    Every call to the decorated function sets *component* and *operation*
    on the logging context for the duration of the call.
    """
    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            ctx = with_context(component=component, operation=operation)
            with ctx:
                return func(*args, **kwargs)
        return wrapped
    return decorator


def setup_logging(
    level: int = logging.INFO,
    log_dir: Optional[Path] = None,
    console: bool = True,
    json_file: bool = True,
    max_bytes: int = 10 * 1024 * 1024,  # 10MB
    backup_count: int = 5
) -> None:
    """
    Configure logging for Genesis system.

    Replaces all handlers on the root logger, so it is safe to call
    repeatedly (e.g. to change level or directory).

    Args:
        level: Minimum log level
        log_dir: Directory for log files (created if missing);
            defaults to DEFAULT_LOG_DIR
        console: Enable console output
        json_file: Enable JSON file output
        max_bytes: Max size per log file before rotation
        backup_count: Number of rotated backup files to keep
    """
    log_dir = log_dir or DEFAULT_LOG_DIR
    log_dir.mkdir(parents=True, exist_ok=True)

    # Get root logger
    root = logging.getLogger()
    root.setLevel(level)

    # Clear existing handlers so repeated calls don't duplicate output
    root.handlers.clear()

    # Console handler (human readable)
    if console:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(level)
        console_handler.setFormatter(ConsoleFormatter())
        root.addHandler(console_handler)

    # JSON file handler (machine readable).  encoding is pinned to UTF-8
    # so non-ASCII extra fields don't depend on the platform default
    # (e.g. cp1252 on Windows, which would raise UnicodeEncodeError).
    if json_file:
        json_path = log_dir / "genesis.json.log"
        file_handler = logging.handlers.RotatingFileHandler(
            json_path,
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding="utf-8"
        )
        file_handler.setLevel(level)
        file_handler.setFormatter(JSONFormatter())
        root.addHandler(file_handler)

    # Error file (errors only, for quick scanning)
    error_path = log_dir / "genesis.error.log"
    error_handler = logging.handlers.RotatingFileHandler(
        error_path,
        maxBytes=max_bytes,
        backupCount=backup_count,
        encoding="utf-8"
    )
    error_handler.setLevel(logging.ERROR)
    error_handler.setFormatter(JSONFormatter())
    root.addHandler(error_handler)

    logging.info("Genesis logging configured", extra={
        "log_dir": str(log_dir),
        "level": logging.getLevelName(level),
        "console": console,
        "json_file": json_file
    })


def get_logger(name: str) -> logging.Logger:
    """Return the logger registered under *name*.

    Thin wrapper over logging.getLogger so Genesis code imports one
    module for both configuration and logger access.
    """
    return logging.getLogger(name)


class OperationTimer:
    """Context manager that times an operation and logs its duration.

    On success logs at *level*; logs a WARNING when the duration exceeds
    *warn_threshold_ms*; logs an ERROR (with traceback) when the block
    raises.  Duration is measured with time.perf_counter(), a monotonic
    clock, so wall-clock adjustments (NTP, DST) cannot produce negative
    or skewed durations.  The exception, if any, is re-raised.
    """

    def __init__(
        self,
        logger: logging.Logger,
        operation: str,
        level: int = logging.INFO,
        warn_threshold_ms: Optional[float] = None
    ):
        """
        Args:
            logger: Logger to emit the timing record on.
            operation: Human-readable operation name.
            level: Level used for the normal completion message.
            warn_threshold_ms: Emit a WARNING instead when the operation
                takes longer than this many milliseconds.
        """
        self.logger = logger
        self.operation = operation
        self.level = level
        self.warn_threshold_ms = warn_threshold_ms
        self.start_time = None      # wall-clock start, kept for callers
        self.duration_ms = None     # populated on exit
        self._perf_start = None     # monotonic start, used for duration

    def __enter__(self):
        self.start_time = datetime.now()
        self._perf_start = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Monotonic delta -> milliseconds
        self.duration_ms = (time.perf_counter() - self._perf_start) * 1000

        extra = {
            "duration_ms": round(self.duration_ms, 2),
            "operation": self.operation
        }

        if exc_type:
            extra["error"] = str(exc_val)
            self.logger.error(
                f"Operation '{self.operation}' failed after {self.duration_ms:.2f}ms",
                extra=extra,
                exc_info=True
            )
        elif self.warn_threshold_ms and self.duration_ms > self.warn_threshold_ms:
            self.logger.warning(
                f"Operation '{self.operation}' slow: {self.duration_ms:.2f}ms",
                extra=extra
            )
        else:
            self.logger.log(
                self.level,
                f"Operation '{self.operation}' completed in {self.duration_ms:.2f}ms",
                extra=extra
            )

        return False  # never suppress the exception


# Pre-configured loggers for Genesis components
class GenesisLoggers:
    """Collection of pre-configured loggers for Genesis components.

    Each attribute is a module-level logging.Logger created at import
    time under the "genesis." namespace, so components can share
    `GenesisLoggers.memory` etc. instead of repeating name strings.
    """

    memory = get_logger("genesis.memory")          # memory tiers / storage
    cortex = get_logger("genesis.cortex")          # cortex processing
    blackboard = get_logger("genesis.blackboard")  # shared blackboard
    surprise = get_logger("genesis.surprise")      # novelty/surprise detection
    mcp_sync = get_logger("genesis.mcp_sync")      # MCP synchronization
    kernel = get_logger("genesis.kernel")          # core kernel
    heartbeat = get_logger("genesis.heartbeat")    # liveness/heartbeat


# Initialize logging on import (can be reconfigured)
# Tracks whether setup_logging() has already run via ensure_initialized().
_initialized = False


def ensure_initialized():
    """Ensure logging is initialized.

    Idempotent: the first call runs setup_logging() with defaults; later
    calls are no-ops.  Not guarded by a lock -- two threads racing here
    could both call setup_logging(), which is harmless because it
    replaces the root handlers each time.
    """
    global _initialized
    if not _initialized:
        setup_logging()
        _initialized = True


# CLI interface: `python logging_config.py demo` exercises every feature;
# `... test` emits one message per level.
if __name__ == "__main__":
    # DEBUG level so the demo/test commands show every message.
    # Note: `sys` is already imported at module top; no local re-import.
    setup_logging(level=logging.DEBUG)

    if len(sys.argv) > 1:
        cmd = sys.argv[1]

        if cmd == "demo":
            print("=== Genesis Logging Demo ===\n")

            logger = get_logger("demo")

            # Basic logging
            logger.info("Basic log message")
            logger.warning("Warning with extra data", extra={"user": "test", "action": "demo"})

            # With correlation context
            with with_context(correlation_id="req-12345", component="MemoryStore"):
                logger.info("Memory operation started")
                logger.info("Memory stored", extra={"memory_id": "abc", "tier": "semantic"})

            # Operation timing: 50ms sleep stays under the 100ms warn threshold
            with OperationTimer(logger, "embedding_generation", warn_threshold_ms=100):
                time.sleep(0.05)

            # Nested context
            with with_context(correlation_id="batch-001"):
                logger.info("Batch started")
                with with_context(component="SurpriseDetector", operation="evaluate"):
                    logger.info("Evaluating novelty", extra={"score": 0.85})

            # Error logging (narrow except: the demo only raises ValueError)
            try:
                raise ValueError("Demo error")
            except ValueError:
                logger.error("Operation failed", exc_info=True, extra={"recoverable": True})

            print("\n[OK] Check logs/ directory for JSON output")

        elif cmd == "test":
            print("Testing log output formats...\n")

            logger = get_logger("test")

            with with_context(correlation_id="test-123", component="TestComponent"):
                logger.debug("Debug message")
                logger.info("Info message", extra={"key": "value"})
                logger.warning("Warning message")
                logger.error("Error message")

            print("\n[OK] Logging test complete")

        else:
            print(f"Unknown command: {cmd}")
            print("Usage: python logging_config.py [demo|test]")
    else:
        print("Genesis Structured Logging")
        print("Usage: python logging_config.py [demo|test]")