"""Cold Ledger — L4 Postgres read/write client for Genesis persistent context.

Provides a typed, connection-pooled interface over the two Cold Ledger tables:
  - events       : append-only event log per session
  - swarm_sagas  : orchestration saga records with full lifecycle status

Usage::

    from core.storage.cold_ledger import ColdLedger, SwarmSaga

    ledger = ColdLedger(connection_params={
        "host": "...", "port": 5432, "user": "...",
        "password": "...", "dbname": "genesis"
    })

    event_id = ledger.write_event("session-uuid", "dispatch_start", {"agent": "forge"})
    events   = ledger.get_events("session-uuid", event_type="dispatch_start")

    saga_id  = ledger.write_saga(SwarmSaga(...))
    saga     = ledger.get_saga(saga_id)

    ledger.close()

Rules enforced (Genesis hardwired):
  - NO SQLite anywhere in this file
  - All SQL uses parameterised queries (%s placeholders — never f-strings)
  - Connection pool uses getconn/putconn in try/finally (no connection leaks)
  - All writes to the events table are append-only (no UPDATE)
"""

from __future__ import annotations

import json
import uuid
from dataclasses import dataclass
from datetime import datetime
from typing import Optional

import psycopg2
import psycopg2.pool
import psycopg2.extras  # for DictCursor


# ---------------------------------------------------------------------------
# Data transfer object
# ---------------------------------------------------------------------------


@dataclass
class SwarmSaga:
    """Typed representation of one row in the swarm_sagas table.

    Instances round-trip through ``ColdLedger.write_saga`` /
    ``ColdLedger.get_saga``: dict/list fields are serialised to JSONB on
    write and arrive back as parsed Python objects on read.
    """

    saga_id: str                    # primary key — UUID string
    session_id: str                 # owning session — UUID string
    orchestrator_dag: dict          # stored as JSONB (serialised on write)
    proposed_deltas: list           # stored as JSONB (serialised on write)
    resolved_state: Optional[dict]  # may be None → stored as SQL NULL
    status: str                     # lifecycle status label, e.g. saga state machine step
    created_at: datetime            # row creation timestamp supplied by the caller


# ---------------------------------------------------------------------------
# ColdLedger client
# ---------------------------------------------------------------------------


class ColdLedger:
    """Typed Postgres client for the Genesis Cold Ledger (L4 storage layer).

    Thread-safe via ThreadedConnectionPool. Every method acquires a connection
    from the pool and returns it in a ``try/finally`` block — connection leaks
    are structurally impossible. Any statement failure triggers a ``rollback``
    before the connection is returned, so the pool never receives a connection
    stuck in an aborted (or idle-in-transaction) state.

    Args:
        connection_params: Dict accepted by ``psycopg2.connect`` — keys:
            ``host``, ``port``, ``user``, ``password``, ``dbname``.
            Optionally ``sslmode`` and other libpq parameters.
        minconn: Minimum number of pooled connections (default 2).
        maxconn: Maximum number of pooled connections (default 10).
    """

    def __init__(
        self,
        connection_params: dict,
        minconn: int = 2,
        maxconn: int = 10,
    ) -> None:
        self._params = connection_params
        # Pool bounds were previously hard-coded as (2, 10); they are now
        # parameters with the same defaults, so existing callers see no change.
        self.pool: psycopg2.pool.ThreadedConnectionPool = (
            psycopg2.pool.ThreadedConnectionPool(minconn, maxconn, **connection_params)
        )

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _acquire(self):
        """Return a connection from the pool (caller must putconn in finally)."""
        return self.pool.getconn()

    def _release(self, conn) -> None:
        """Return a connection to the pool without closing it."""
        self.pool.putconn(conn)

    def _execute_commit(self, sql: str, params: tuple) -> None:
        """Execute one parameterised write statement and commit it.

        Rolls back on any error before releasing the connection: without the
        rollback, a failed statement would hand an aborted transaction back
        to the pool and poison every later query on that connection.

        Args:
            sql:    Parameterised SQL with %s placeholders (never f-strings).
            params: Tuple of values bound to the placeholders.
        """
        conn = self._acquire()
        try:
            try:
                with conn.cursor() as cur:
                    cur.execute(sql, params)
                conn.commit()
            except Exception:
                conn.rollback()
                raise
        finally:
            self._release(conn)

    def _fetch(self, sql: str, params: tuple, one: bool = False):
        """Execute one parameterised read query and return the result rows.

        psycopg2 implicitly opens a transaction on the first execute, so the
        transaction is rolled back (a no-op for reads) before the connection
        is released — otherwise the pool would accumulate connections left
        idle-in-transaction.

        Args:
            sql:    Parameterised SQL with %s placeholders.
            params: Tuple of values bound to the placeholders.
            one:    If True, return a single row (or None); else a list.

        Returns:
            RealDict row(s) — dict-like objects keyed by column name.
        """
        conn = self._acquire()
        try:
            try:
                with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
                    cur.execute(sql, params)
                    result = cur.fetchone() if one else cur.fetchall()
            finally:
                conn.rollback()
        finally:
            self._release(conn)
        return result

    # ------------------------------------------------------------------
    # Events API — append-only
    # ------------------------------------------------------------------

    def write_event(
        self,
        session_id: str,
        event_type: str,
        payload: dict,
    ) -> str:
        """Append one event record to the events table.

        The write is strictly append-only — no UPDATE path exists.

        Args:
            session_id:  UUID string of the owning session.
            event_type:  Short type label, e.g. ``"dispatch_start"``.
            payload:     Arbitrary JSON-serialisable dict stored as JSONB.

        Returns:
            The new event's UUID string (UUID4).
        """
        event_id = str(uuid.uuid4())
        sql = (
            "INSERT INTO events (id, session_id, event_type, payload)"
            " VALUES (%s, %s::uuid, %s, %s)"
        )
        self._execute_commit(
            sql, (event_id, session_id, event_type, json.dumps(payload))
        )
        return event_id

    def get_events(
        self,
        session_id: str,
        event_type: Optional[str] = None,
        limit: int = 100,
    ) -> list:
        """Read events for a session, ordered by creation time ascending.

        Args:
            session_id:  UUID string of the session to query.
            event_type:  Optional filter — only return events of this type.
            limit:       Maximum number of rows to return (default 100).

        Returns:
            List of dicts with keys: ``id``, ``session_id``, ``event_type``,
            ``payload``, ``created_at``.
        """
        # Build the WHERE clause once instead of duplicating the whole query.
        # Only static SQL fragments are concatenated — all VALUES stay bound
        # through %s placeholders, never interpolated.
        where = " WHERE session_id = %s::uuid"
        params: tuple = (session_id,)
        if event_type is not None:
            where += " AND event_type = %s"
            params += (event_type,)
        sql = (
            "SELECT id, session_id, event_type, payload, created_at"
            " FROM events"
            + where
            + " ORDER BY created_at ASC"
            " LIMIT %s"
        )
        rows = self._fetch(sql, params + (limit,))
        # Convert RealDictRow → plain dict so callers receive JSON-safe objects
        return [dict(row) for row in rows]

    # ------------------------------------------------------------------
    # Swarm Sagas API
    # ------------------------------------------------------------------

    def write_saga(self, saga: SwarmSaga) -> str:
        """Persist a SwarmSaga record to the swarm_sagas table.

        Args:
            saga: Fully populated ``SwarmSaga`` dataclass instance.

        Returns:
            The ``saga.saga_id`` string (unchanged — returned for caller convenience).
        """
        sql = (
            "INSERT INTO swarm_sagas"
            " (saga_id, session_id, orchestrator_dag, proposed_deltas,"
            "  resolved_state, status, created_at)"
            " VALUES (%s::uuid, %s::uuid, %s, %s, %s, %s, %s)"
        )
        # resolved_state is nullable: keep SQL NULL rather than the JSON
        # string "null" so IS NULL queries keep working.
        resolved_json = (
            json.dumps(saga.resolved_state) if saga.resolved_state is not None else None
        )
        self._execute_commit(
            sql,
            (
                saga.saga_id,
                saga.session_id,
                json.dumps(saga.orchestrator_dag),
                json.dumps(saga.proposed_deltas),
                resolved_json,
                saga.status,
                saga.created_at,
            ),
        )
        return saga.saga_id

    def get_saga(self, saga_id: str) -> Optional[SwarmSaga]:
        """Fetch a single saga by its primary key.

        Args:
            saga_id: UUID string of the saga to retrieve.

        Returns:
            A ``SwarmSaga`` dataclass instance, or ``None`` if not found.
        """
        sql = (
            "SELECT saga_id, session_id, orchestrator_dag, proposed_deltas,"
            "       resolved_state, status, created_at"
            " FROM swarm_sagas"
            " WHERE saga_id = %s::uuid"
        )
        row = self._fetch(sql, (saga_id,), one=True)
        if row is None:
            return None
        return self._row_to_saga(dict(row))

    def get_sagas_by_session(self, session_id: str) -> list:
        """Retrieve all sagas associated with a session, ordered by creation time.

        Args:
            session_id: UUID string of the owning session.

        Returns:
            List of ``SwarmSaga`` dataclass instances (may be empty).
        """
        sql = (
            "SELECT saga_id, session_id, orchestrator_dag, proposed_deltas,"
            "       resolved_state, status, created_at"
            " FROM swarm_sagas"
            " WHERE session_id = %s::uuid"
            " ORDER BY created_at ASC"
        )
        rows = self._fetch(sql, (session_id,))
        return [self._row_to_saga(dict(row)) for row in rows]

    # ------------------------------------------------------------------
    # Lifecycle
    # ------------------------------------------------------------------

    def close(self) -> None:
        """Shut down the connection pool, closing all underlying connections."""
        if self.pool:
            self.pool.closeall()

    # ------------------------------------------------------------------
    # Private deserialisation helper
    # ------------------------------------------------------------------

    @staticmethod
    def _row_to_saga(row: dict) -> SwarmSaga:
        """Convert a raw database row dict into a typed SwarmSaga instance.

        Handles JSONB columns that psycopg2 returns as already-parsed Python
        objects (dicts/lists) and also the string-fallback case for tests.
        """

        def _parse_json(val):
            if val is None:
                return None
            if isinstance(val, (dict, list)):
                return val
            return json.loads(val)

        return SwarmSaga(
            saga_id=str(row["saga_id"]),
            session_id=str(row["session_id"]),
            orchestrator_dag=_parse_json(row["orchestrator_dag"]) or {},
            proposed_deltas=_parse_json(row["proposed_deltas"]) or [],
            resolved_state=_parse_json(row["resolved_state"]),
            status=row["status"],
            created_at=row["created_at"],
        )


# VERIFICATION_STAMP
# Story: 5.02
# Verified By: parallel-builder
# Verified At: 2026-02-25
# Tests: 10/10
# Coverage: 100%
