"""
AIVA Memory Bridge - PM-022

Connects AIVA to RLM/Qdrant/Redis memory systems.
Provides unified interface for memory queries across all storage backends.
"""

import hashlib
import json
import logging
import os
from dataclasses import asdict, dataclass
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Union

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)


@dataclass
class MemoryResult:
    """Result from a memory query."""
    source: str  # "postgresql", "qdrant", "redis"
    content: Any
    score: Optional[float] = None
    metadata: Optional[Dict] = None
    timestamp: Optional[str] = None

    def to_dict(self) -> Dict:
        """Convert to dictionary."""
        return asdict(self)


@dataclass
class Entity:
    """Entity stored in PostgreSQL RLM."""
    entity_id: str
    entity_type: str
    name: str
    properties: Dict
    created_at: str
    updated_at: str


class PostgreSQLConnector:
    """Connector for PostgreSQL entities (RLM)."""

    def __init__(self, connection_string: Optional[str] = None):
        """Set up the connector.

        Uses the given DSN, else the DATABASE_URL environment variable,
        else a local development default. No connection is opened here.
        """
        fallback_dsn = "postgresql://genesis:genesis@localhost:5432/genesis_rlm"
        self.connection_string = connection_string or os.getenv("DATABASE_URL", fallback_dsn)
        self._connection = None
        logger.info("PostgreSQL connector initialized")

    def connect(self) -> bool:
        """Open a database connection.

        Returns:
            True when a connection was established; False when psycopg2 is
            not installed (queries fall back to simulation) or the connect
            attempt fails.
        """
        try:
            import psycopg2
        except ImportError:
            logger.warning("psycopg2 not installed, PostgreSQL queries will be simulated")
            return False
        try:
            self._connection = psycopg2.connect(self.connection_string)
        except Exception as e:
            logger.error(f"PostgreSQL connection failed: {e}")
            return False
        logger.info("PostgreSQL connection established")
        return True

    def query_entities(
        self,
        entity_type: Optional[str] = None,
        filters: Optional[Dict] = None,
        limit: int = 10
    ) -> List["Entity"]:
        """Query entities from PostgreSQL.

        Args:
            entity_type: Filter by entity type.
            filters: Additional filters.
            limit: Maximum results.

        Returns:
            List of Entity objects (currently always empty — simulated).
        """
        # Simulated path for when the DB is unavailable; the production
        # version would execute:
        #   SELECT * FROM entities WHERE type = %s LIMIT %s
        logger.info(f"Querying PostgreSQL entities: type={entity_type}, limit={limit}")
        return []

    def get_entity(self, entity_id: str) -> Optional["Entity"]:
        """Look up a single entity by ID (simulated: always None)."""
        logger.info(f"Fetching entity: {entity_id}")
        return None

    def store_entity(self, entity: "Entity") -> bool:
        """Insert or update an entity (simulated: logs and reports success)."""
        logger.info(f"Storing entity: {entity.entity_id}")
        return True


class QdrantConnector:
    """Connector for Qdrant vector database."""

    def __init__(self, host: str = "localhost", port: int = 6333):
        """Record the Qdrant endpoint; no network connection is made here."""
        self.host = host
        self.port = port
        self.collection_name = "genesis_memory"
        self._client = None
        logger.info(f"Qdrant connector initialized: {host}:{port}")

    def connect(self) -> bool:
        """Open a Qdrant client connection.

        Returns:
            True on success; False when qdrant_client is not installed
            (search falls back to simulation) or the connection fails.
        """
        try:
            from qdrant_client import QdrantClient
        except ImportError:
            logger.warning("qdrant_client not installed, semantic search will be simulated")
            return False
        try:
            self._client = QdrantClient(host=self.host, port=self.port)
        except Exception as e:
            logger.error(f"Qdrant connection failed: {e}")
            return False
        logger.info("Qdrant connection established")
        return True

    def semantic_search(
        self,
        query_vector: List[float],
        limit: int = 5,
        filter_conditions: Optional[Dict] = None
    ) -> List["MemoryResult"]:
        """Perform semantic search in Qdrant.

        Args:
            query_vector: Query embedding vector.
            limit: Maximum results.
            filter_conditions: Metadata filters.

        Returns:
            List of MemoryResult objects (currently always empty — simulated).
        """
        logger.info(f"Semantic search in Qdrant: limit={limit}")
        # The production version would call:
        #   self._client.search(
        #       collection_name=self.collection_name,
        #       query_vector=query_vector,
        #       limit=limit,
        #       query_filter=filter_conditions,
        #   )
        return []

    def search_by_text(
        self,
        text: str,
        limit: int = 5
    ) -> List["MemoryResult"]:
        """Search by text (requires embedding generation).

        Args:
            text: Search query text.
            limit: Maximum results.

        Returns:
            List of MemoryResult objects (currently always empty — would
            generate an embedding and delegate to semantic_search).
        """
        logger.info(f"Text search in Qdrant: '{text[:50]}...'")
        return []

    def store_embedding(
        self,
        vector: List[float],
        payload: Dict,
        point_id: Optional[str] = None
    ) -> bool:
        """Store a vector embedding with payload (simulated: logs and succeeds)."""
        logger.info(f"Storing embedding in Qdrant: id={point_id}")
        return True


class RedisConnector:
    """Connector for Redis real-time state."""

    def __init__(self, host: str = "localhost", port: int = 6379, db: int = 0):
        """Record the Redis endpoint; no connection is opened here."""
        self.host = host
        self.port = port
        self.db = db
        self._client = None
        logger.info(f"Redis connector initialized: {host}:{port}/{db}")

    def connect(self) -> bool:
        """Open and ping a Redis connection.

        Returns:
            True on success; False when the redis package is not installed
            (state falls back to simulation) or connecting/pinging fails.
        """
        try:
            import redis
        except ImportError:
            logger.warning("redis not installed, real-time state will be simulated")
            return False
        try:
            self._client = redis.Redis(host=self.host, port=self.port, db=self.db)
            self._client.ping()
        except Exception as e:
            logger.error(f"Redis connection failed: {e}")
            return False
        logger.info("Redis connection established")
        return True

    def get_state(self, key: str) -> Optional[Any]:
        """Fetch and JSON-decode the value at *key*.

        Returns None when disconnected, when the key is absent, or on any
        Redis/JSON error (errors are logged, not raised).
        """
        logger.debug(f"Getting Redis state: {key}")
        if not self._client:
            return None
        try:
            raw = self._client.get(key)
            if raw:
                return json.loads(raw)
        except Exception as e:
            logger.error(f"Redis get failed: {e}")
        return None

    def set_state(self, key: str, value: Any, ttl: Optional[int] = None) -> bool:
        """JSON-encode *value* and store it at *key*.

        Args:
            key: Redis key.
            value: JSON-serializable value.
            ttl: Expiry in seconds; a falsy ttl stores without expiry.

        Returns:
            True on success, False when disconnected or on error.
        """
        logger.debug(f"Setting Redis state: {key}")
        if not self._client:
            return False
        try:
            payload = json.dumps(value)
            if ttl:
                self._client.setex(key, ttl, payload)
            else:
                self._client.set(key, payload)
            return True
        except Exception as e:
            logger.error(f"Redis set failed: {e}")
            return False

    def get_queue(self, queue_name: str, count: int = 10) -> List[Any]:
        """Read up to *count* items from the front of a Redis list,
        JSON-decoding each; [] when disconnected or on error."""
        logger.debug(f"Getting Redis queue: {queue_name}")
        if not self._client:
            return []
        try:
            raw_items = self._client.lrange(queue_name, 0, count - 1)
            return [json.loads(item) for item in raw_items]
        except Exception as e:
            logger.error(f"Redis lrange failed: {e}")
            return []

    def push_queue(self, queue_name: str, value: Any) -> bool:
        """Append a JSON-encoded item to the tail of a Redis list."""
        logger.debug(f"Pushing to Redis queue: {queue_name}")
        if not self._client:
            return False
        try:
            self._client.rpush(queue_name, json.dumps(value))
            return True
        except Exception as e:
            logger.error(f"Redis rpush failed: {e}")
            return False


class MemoryBridge:
    """
    Unified memory interface for AIVA.

    Connects to:
    - PostgreSQL: Entity storage (RLM)
    - Qdrant: Semantic/vector search
    - Redis: Real-time state and queues
    """

    def __init__(self):
        """Initialize all memory connectors (connections are opened lazily
        via connect_all)."""
        self.postgresql = PostgreSQLConnector()
        self.qdrant = QdrantConnector()
        self.redis = RedisConnector()
        self._connected = False
        logger.info("MemoryBridge initialized")

    @staticmethod
    def _stable_id(content: Any) -> str:
        """Return a deterministic hex digest identifying *content*.

        Fix: the previous implementation used builtin hash(str(content)),
        whose value changes between interpreter runs (PYTHONHASHSEED string
        hash randomization), so content-derived entity IDs and Redis keys
        were not reproducible across restarts. SHA-256 of the repr is stable.
        """
        return hashlib.sha256(str(content).encode("utf-8")).hexdigest()

    def connect_all(self) -> Dict[str, bool]:
        """
        Connect to all memory backends.

        Returns:
            Dict showing connection status for each backend
        """
        status = {
            "postgresql": self.postgresql.connect(),
            "qdrant": self.qdrant.connect(),
            "redis": self.redis.connect()
        }
        # The bridge counts as connected if at least one backend is reachable.
        self._connected = any(status.values())
        logger.info(f"Memory connections: {status}")
        return status

    def search_memory(
        self,
        query: str,
        sources: Optional[List[str]] = None,
        limit: int = 10
    ) -> List["MemoryResult"]:
        """
        Search across all memory sources.

        Args:
            query: Search query (text for semantic, or structured for SQL)
            sources: List of sources to search ["postgresql", "qdrant", "redis"]
            limit: Maximum results per source

        Returns:
            Combined list of MemoryResult objects
        """
        sources = sources or ["postgresql", "qdrant", "redis"]
        results: List["MemoryResult"] = []

        logger.info(f"Searching memory: query='{query[:50]}...' sources={sources}")

        if "postgresql" in sources:
            # Entity lookup; note the query text is not yet used for SQL
            # filtering — only the limit is applied.
            entities = self.postgresql.query_entities(limit=limit)
            for entity in entities:
                results.append(MemoryResult(
                    source="postgresql",
                    content=entity,
                    metadata={"entity_type": entity.entity_type}
                ))

        if "qdrant" in sources:
            # Semantic search in Qdrant
            results.extend(self.qdrant.search_by_text(query, limit=limit))

        if "redis" in sources:
            # Check Redis for cached/recent data keyed by a query prefix.
            cached = self.redis.get_state(f"cache:{query[:32]}")
            if cached:
                results.append(MemoryResult(
                    source="redis",
                    content=cached,
                    metadata={"cached": True}
                ))

        logger.info(f"Memory search returned {len(results)} results")
        return results

    def store_memory(
        self,
        content: Any,
        memory_type: str = "general",
        metadata: Optional[Dict] = None
    ) -> Dict[str, bool]:
        """
        Store content in appropriate memory backends.

        Args:
            content: Content to store
            memory_type: Type of memory (entity, embedding, state, cache, general)
            metadata: Additional metadata

        Returns:
            Dict showing storage status for each backend touched
        """
        metadata = metadata or {}
        status: Dict[str, bool] = {}

        logger.info(f"Storing memory: type={memory_type}")

        if memory_type == "entity":
            # Store as a PostgreSQL entity. Fixes: timestamps are now
            # timezone-aware UTC (datetime.utcnow() is deprecated and
            # returns naive datetimes), computed once so created_at equals
            # updated_at on insert; the fallback ID is deterministic.
            now = datetime.now(timezone.utc).isoformat()
            entity = Entity(
                entity_id=metadata.get("id", self._stable_id(content)),
                entity_type=metadata.get("entity_type", "general"),
                name=metadata.get("name", "unnamed"),
                properties={"content": content, **metadata},
                created_at=now,
                updated_at=now
            )
            status["postgresql"] = self.postgresql.store_entity(entity)

        if memory_type in ["embedding", "general"]:
            # Store as Qdrant embedding (would need to generate vector)
            # Placeholder: actual implementation needs embedding model
            status["qdrant"] = True

        if memory_type in ["state", "cache"]:
            # Store in Redis under the caller-supplied key, or a
            # deterministic content-derived key.
            key = metadata.get("key", f"memory:{self._stable_id(content)}")
            ttl = metadata.get("ttl")
            status["redis"] = self.redis.set_state(key, content, ttl)

        return status

    def get_context(self, context_type: str = "current") -> Dict[str, Any]:
        """
        Get AIVA's current context from memory.

        Args:
            context_type: Type of context to retrieve

        Returns:
            Dict containing context information
        """
        context: Dict[str, Any] = {
            # Timezone-aware UTC timestamp (utcnow() is deprecated).
            "retrieved_at": datetime.now(timezone.utc).isoformat(),
            "context_type": context_type
        }

        # Get current state from Redis
        current_state = self.redis.get_state("aiva:current_state")
        if current_state:
            context["current_state"] = current_state

        # Recent tasks are always included, even when empty.
        context["recent_tasks"] = self.redis.get_queue("aiva:recent_tasks", count=5)

        # Get active objectives
        objectives = self.redis.get_state("aiva:objectives")
        if objectives:
            context["objectives"] = objectives

        return context

    def update_context(self, updates: Dict[str, Any]) -> bool:
        """
        Update AIVA's context in memory.

        Args:
            updates: Context updates to apply

        Returns:
            True (best-effort: individual Redis failures are logged by the
            connector rather than surfaced here)
        """
        logger.info(f"Updating context with {len(updates)} keys")

        # Each key is namespaced under "aiva:" in Redis.
        for key, value in updates.items():
            self.redis.set_state(f"aiva:{key}", value)

        return True


# Singleton instance, created lazily by get_memory_bridge().
_memory_bridge: Optional[MemoryBridge] = None


def get_memory_bridge() -> MemoryBridge:
    """Get or create the singleton MemoryBridge instance.

    The bridge is constructed on first call and reused afterwards.
    NOTE(review): creation is not guarded by a lock, so concurrent first
    calls from multiple threads could each build a bridge — confirm whether
    callers are single-threaded.
    """
    global _memory_bridge
    if _memory_bridge is None:
        _memory_bridge = MemoryBridge()
    return _memory_bridge


if __name__ == "__main__":
    # Smoke-test the bridge end to end.
    memory = MemoryBridge()

    # Attempt connections to every backend and report what succeeded.
    backend_status = memory.connect_all()
    print(f"Connection status: {backend_status}")

    # Run a cross-backend search.
    hits = memory.search_memory("AIVA objectives", limit=5)
    print(f"Search results: {len(hits)}")

    # Persist a small piece of state under an explicit key.
    write_status = memory.store_memory(
        {"task": "test", "status": "complete"},
        memory_type="state",
        metadata={"key": "test:memory"}
    )
    print(f"Store status: {write_status}")

    # Dump the current context snapshot.
    snapshot = memory.get_context()
    print(f"Current context: {snapshot}")