"""
AIVA RLM Nexus — pre_call_hook
Standalone async function called on every call.initiated webhook event.
Triggers BinduHydrator and returns the context envelope.

Story 4.06 — Track A
File: core/hydrators/pre_call_hook.py
"""
import sys
sys.path.insert(0, "/mnt/e/genesis-system")

import json
import logging
import time
from pathlib import Path
from typing import Optional

from core.hydrators.bindu_hydrator import BinduHydrator

logger = logging.getLogger(__name__)

EVENTS_PATH = Path("/mnt/e/genesis-system/data/observability/events.jsonl")


async def pre_call_hook(
    session_id: str,
    call_id: str,
    caller_number: str,
    redis_client=None,
    postgres_client=None,
    qdrant_client=None,
) -> str:
    """
    Entry point invoked for every call.initiated webhook.

    Flow: record the caller's number on the session state hash, spin up a
    per-call BinduHydrator with the injected clients, mark hydration pending,
    run the scatter/gather assembly, emit a timing event, and hand back the
    ROYAL_CHAMBER_CONTEXT XML envelope.

    Every infrastructure client arrives as a parameter (no singletons), so
    the hook can be exercised in tests with fakes instead of live services.

    Args:
        session_id:      Unique identifier for this hydration cycle.
        call_id:         Telnyx call_control_id (or internal call UUID).
        caller_number:   E.164 phone number of the incoming caller.
        redis_client:    Optional async Redis client (must support hset/setex/get).
        postgres_client: Optional psycopg2 connection pool (getconn/putconn).
        qdrant_client:   Optional async Qdrant client.

    Returns:
        ROYAL_CHAMBER_CONTEXT XML envelope as a string.
    """
    t0 = time.monotonic()

    # Persist the caller's number on the session state hash. This is strictly
    # best-effort: a Redis hiccup must never abort call setup.
    if redis_client is not None:
        state_key = f"aiva:state:{session_id}"
        try:
            await redis_client.hset(state_key, "caller_number", caller_number)
            logger.debug(
                "pre_call_hook: stored caller_number '%s' in aiva:state:%s",
                caller_number,
                session_id,
            )
        except Exception as exc:  # noqa: BLE001
            logger.error(
                "pre_call_hook: Redis HSET failed for session '%s': %s — proceeding",
                session_id,
                exc,
            )

    # A fresh hydrator per call (never a shared singleton) keeps client
    # injection explicit and the hook fully testable.
    hydrator = BinduHydrator(
        redis_client=redis_client,
        postgres_client=postgres_client,
        qdrant_client=qdrant_client,
    )

    # Flag the session as pending, then fan out the scatter/gather and
    # assemble the context envelope.
    await hydrator.start_hydration(session_id, call_id)
    xml_envelope = await hydrator.gather_and_assemble(session_id, call_id)

    # Record how long the whole hook took, both to the observability log
    # and to the application logger.
    elapsed_ms = (time.monotonic() - t0) * 1000
    _log_timing(session_id, call_id, elapsed_ms)
    logger.info(
        "pre_call_hook: session=%s call=%s completed in %.1fms",
        session_id,
        call_id,
        elapsed_ms,
    )

    return xml_envelope


def _log_timing(session_id: str, call_id: str, elapsed_ms: float) -> None:
    """
    Best-effort append of one timing record to EVENTS_PATH (JSON Lines).

    Each appended line is a standalone JSON object::

        {
            "event": "pre_call_hook_complete",
            "session_id": "...",
            "call_id": "...",
            "elapsed_ms": 123.4
        }

    Any file-system failure is logged and swallowed — a missing timing line
    must never take down an active call.
    """
    record = {
        "event": "pre_call_hook_complete",
        "session_id": session_id,
        "call_id": call_id,
        "elapsed_ms": round(elapsed_ms, 2),
    }
    try:
        # Create the observability directory on first use, then append.
        EVENTS_PATH.parent.mkdir(parents=True, exist_ok=True)
        with EVENTS_PATH.open("a", encoding="utf-8") as sink:
            sink.write(json.dumps(record) + "\n")
        logger.debug(
            "_log_timing: appended timing event (%.1fms) to %s",
            elapsed_ms,
            EVENTS_PATH,
        )
    except Exception as exc:  # noqa: BLE001
        logger.error(
            "_log_timing: failed to write events.jsonl: %s — timing event discarded",
            exc,
        )


# VERIFICATION_STAMP
# Story: 4.06
# Verified By: parallel-builder
# Verified At: 2026-02-25
# Tests: 11/11
# Coverage: 100%
