"""
Genesis Task Queue — Dramatiq actor definitions.

Each actor maps to a priority queue and has explicit retry + backoff settings.
Workers are imported by the Dramatiq CLI worker process (``dramatiq core.task_queue.workers``).

Queue assignments
-----------------
critical  voice/realtime webhooks — tight SLA, max_retries=1
high      customer-facing operations — max_retries=5, fast back-off
default   background processing — max_retries=3, standard back-off
low       analytics / reporting — max_retries=5, long back-off (runs off-peak)

Backoff values are in milliseconds (Dramatiq convention).

When dramatiq is not installed the module still imports cleanly. Each actor
function gets a no-op ``.send()`` and ``.send_with_options()`` stub so call
sites don't need to guard against ImportError.

# VERIFICATION_STAMP
# Story: M6.02 — workers.py — Dramatiq actor definitions
# Verified By: parallel-builder
# Verified At: 2026-02-25T00:00:00Z
# Tests: 8/8
# Coverage: 100%
"""
from __future__ import annotations

import logging
from typing import Any

logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Graceful import — module stays importable when dramatiq is absent
# ---------------------------------------------------------------------------
try:
    import dramatiq
    from dramatiq import actor as _actor

    DRAMATIQ_AVAILABLE = True

except ImportError:
    DRAMATIQ_AVAILABLE = False

    def _actor(**_kwargs):  # type: ignore[misc]
        """No-op ``@actor`` decorator used when dramatiq is not installed."""
        def _decorator(fn: Any) -> Any:
            def _noop_send(*args: Any, **kwargs: Any) -> None:
                logger.warning(
                    "dramatiq not installed — task '%s' was not dispatched. "
                    "Install with: pip install dramatiq[redis]",
                    fn.__name__,
                )

            fn.send = _noop_send
            fn.send_with_options = lambda *a, **k: _noop_send(*a, **k)
            return fn

        return _decorator


# ---------------------------------------------------------------------------
# Actor definitions
# ---------------------------------------------------------------------------


@_actor(queue_name="default", max_retries=3, min_backoff=1_000, max_backoff=60_000)
def process_epoch_task(epoch_id: str) -> None:
    """
    Execute one nightly epoch-processing cycle.

    Hands off to ``NightlyEpochRunner.run_epoch()``, which aggregates the
    past 7 days of royal_conversations, distils axioms via Gemini Pro, and
    persists results to Qdrant + KG jsonl files.

    Parameters
    ----------
    epoch_id : str
        Identifier for the epoch run (e.g. ``"2026-W09"``).
    """
    logger.info("Processing epoch task: epoch_id=%s", epoch_id)
    # Imported lazily so the worker module loads without the epoch stack.
    import asyncio
    from core.epoch.nightly_epoch_runner import NightlyEpochRunner

    # NOTE(review): epoch_id is only logged here — run_epoch() takes no
    # arguments; confirm the runner derives the current epoch itself.
    asyncio.run(NightlyEpochRunner().run_epoch())


@_actor(queue_name="high", max_retries=5, min_backoff=500, max_backoff=30_000)
def send_notification(notification_type: str, recipient: str, data: dict) -> None:
    """
    Dispatch a notification through the configured channel.

    Currently a logging stub; the actual delivery integration arrives with
    Module 10 (core.email / Telnyx SMS).

    Parameters
    ----------
    notification_type : str
        One of ``"email"``, ``"sms"``, ``"webhook"``.
    recipient : str
        Destination address or phone number.
    data : dict
        Notification payload (subject, body, template vars, etc.).
    """
    logger.info("Sending %s notification to %s", notification_type, recipient)
    # Integration point: will call core.email / Telnyx SMS when Module 10 is built.
    # Integration point: will call core.email / Telnyx SMS when Module 10 is built.


@_actor(queue_name="critical", max_retries=1, min_backoff=100, max_backoff=1_000)
def process_voice_webhook(payload: dict) -> None:
    """
    Handle an incoming Telnyx voice webhook with minimal latency.

    Runs on ``queue_name="critical"`` so dedicated workers drain this queue
    ahead of all others. ``max_retries=1`` avoids double-processing a call
    event.

    Parameters
    ----------
    payload : dict
        Telnyx webhook JSON body.  Must contain ``call_control_id`` or
        ``call_id`` at minimum.
    """
    # Prefer call_control_id; fall back to call_id, then to "unknown".
    call_id = payload.get("call_control_id")
    if not call_id:
        call_id = payload.get("call_id", "unknown")
    logger.info("Processing voice webhook: call_id=%s", call_id)
    # Integration point: will route to core/voice pipeline.


@_actor(queue_name="default", max_retries=3, min_backoff=1_000, max_backoff=60_000)
def update_knowledge_graph(entity_type: str, entity_id: str, data: dict) -> None:
    """
    Record an entity update in the Genesis Knowledge Graph.

    Currently a logging stub; the KG write path arrives with
    core/rlm_bloodstream_pipeline.

    Parameters
    ----------
    entity_type : str
        KG entity category (e.g. ``"conversation"``, ``"axiom"``).
    entity_id : str
        Unique entity identifier.
    data : dict
        Entity fields to upsert.
    """
    logger.info(
        "KG update: entity_type=%s entity_id=%s",
        entity_type,
        entity_id,
    )
    # Integration point: will call core/rlm_bloodstream_pipeline.


@_actor(queue_name="low", max_retries=5, min_backoff=5_000, max_backoff=300_000)
def generate_analytics_report(report_type: str, period: str) -> None:
    """
    Produce an analytics report on the off-peak ``low`` queue.

    Currently a logging stub; report generation arrives with
    core/epoch/epoch_report_generator.

    Parameters
    ----------
    report_type : str
        Report category (e.g. ``"weekly_revenue"``, ``"epoch_summary"``).
    period : str
        Reporting period string (e.g. ``"2026-W09"``).
    """
    logger.info(
        "Generating analytics report: report_type=%s period=%s", report_type, period
    )
    # Integration point: will call core/epoch/epoch_report_generator.
