"""
tests/track_b/test_story_7_05.py

Story B-7.05: MergeTelemetry — Merge Analytics

Black Box Tests (BB1–BB4):
    BB1  Record 10 merges (3 conflicts, 2 Opus) → correct rates
    BB2  get_stats() on fresh instance → zero everything
    BB3  avg_latency_ms calculated correctly from recorded latencies
    BB4  MergeRecord is serialisable via dataclasses.asdict()

White Box Tests (WB1–WB4):
    WB1  Redis INCR used for counters (verify mock call args)
    WB2  Events written to events.jsonl (verify file content)
    WB3  Division by zero handled (total=0 → 0.0 rates)
    WB4  Redis rpush used for latencies (not individual keys)

Package Test:
    PKG  core.merge exports MergeTelemetry and MergeRecord
"""

from __future__ import annotations

import json
import sys
from dataclasses import asdict
from pathlib import Path
from unittest.mock import MagicMock, call

import pytest

# ---------------------------------------------------------------------------
# Path setup
# ---------------------------------------------------------------------------

GENESIS_ROOT = "/mnt/e/genesis-system"
if GENESIS_ROOT not in sys.path:
    sys.path.insert(0, GENESIS_ROOT)

# ---------------------------------------------------------------------------
# Imports under test
# ---------------------------------------------------------------------------

from core.merge.merge_telemetry import (  # noqa: E402
    MergeTelemetry,
    MergeRecord,
    REDIS_PREFIX,
    _KEY_TOTAL,
    _KEY_CONFLICTS,
    _KEY_OPUS_CALLS,
    _KEY_LATENCIES,
)


# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

def make_record(
    session_id: str = "sess-001",
    delta_count: int = 2,
    conflict_count: int = 0,
    used_opus: bool = False,
    merge_latency_ms: float = 10.0,
    success: bool = True,
) -> MergeRecord:
    """Build a MergeRecord with sensible test defaults.

    Defaults describe a clean, conflict-free merge handled without Opus;
    each field can be overridden per test.
    """
    fields = {
        "session_id": session_id,
        "delta_count": delta_count,
        "conflict_count": conflict_count,
        "used_opus": used_opus,
        "merge_latency_ms": merge_latency_ms,
        "success": success,
    }
    return MergeRecord(**fields)


def make_redis_mock(total: int = 0, conflicts: int = 0, opus_calls: int = 0,
                    latencies: list[float] | None = None) -> MagicMock:
    """
    Create a MagicMock standing in for a Redis client that already holds
    the given counter values and latency list.

    get() answers with bytes-encoded counters keyed by the module's
    _KEY_* constants (b"0" for unknown keys); lrange() answers with the
    encoded latencies for _KEY_LATENCIES and an empty list otherwise.
    """
    # Build the canned responses once, outside the side-effect callables.
    counters = {
        _KEY_TOTAL: str(total).encode(),
        _KEY_CONFLICTS: str(conflicts).encode(),
        _KEY_OPUS_CALLS: str(opus_calls).encode(),
    }
    stored = [str(v).encode() for v in (latencies or [])]

    client = MagicMock()
    client.get.side_effect = lambda key: counters.get(key, b"0")
    client.lrange.side_effect = (
        lambda key, start, end: list(stored) if key == _KEY_LATENCIES else []
    )
    return client


# ===========================================================================
# BB Tests — Black Box
# ===========================================================================


def test_bb1_correct_rates_after_10_merges():
    """
    BB1: With Redis reporting 10 merges (3 conflicted, 2 escalated to Opus),
         get_stats() must yield conflict_rate_pct=30.0 and opus_rate_pct=20.0.
    """
    telemetry = MergeTelemetry(
        redis_client=make_redis_mock(
            total=10,
            conflicts=3,
            opus_calls=2,
            latencies=[float(i * 5) for i in range(1, 11)],  # 5, 10, …, 50
        ),
        events_path="/dev/null",
    )

    stats = telemetry.get_stats()

    assert stats["total_merges"] == 10
    assert abs(stats["conflict_rate_pct"] - 30.0) < 1e-6, (
        f"Expected 30.0 got {stats['conflict_rate_pct']}"
    )
    assert abs(stats["opus_rate_pct"] - 20.0) < 1e-6, (
        f"Expected 20.0 got {stats['opus_rate_pct']}"
    )


def test_bb2_fresh_instance_all_zeros():
    """BB2: A MergeTelemetry that has never recorded anything reports all-zero stats."""
    stats = MergeTelemetry(redis_client=None).get_stats()

    expected = {
        "total_merges": 0,
        "conflict_rate_pct": 0.0,
        "opus_rate_pct": 0.0,
        "avg_latency_ms": 0.0,
    }
    for key, value in expected.items():
        assert stats[key] == value


def test_bb3_avg_latency_calculated_correctly(tmp_path: Path):
    """BB3: avg_latency_ms is the arithmetic mean of all recorded latencies."""
    # redis_client=None exercises the in-memory latency path.
    telemetry = MergeTelemetry(
        redis_client=None,
        events_path=str(tmp_path / "events.jsonl"),
    )

    samples = [10.0, 20.0, 30.0]
    for sample in samples:
        telemetry.record(make_record(merge_latency_ms=sample))

    expected_avg = sum(samples) / len(samples)  # 20.0
    stats = telemetry.get_stats()
    assert abs(stats["avg_latency_ms"] - expected_avg) < 1e-6, (
        f"Expected {expected_avg} got {stats['avg_latency_ms']}"
    )


def test_bb4_merge_record_serialisable():
    """BB4: MergeRecord converts cleanly to a plain dict via dataclasses.asdict()."""
    d = asdict(
        make_record(
            session_id="sess-XYZ",
            delta_count=5,
            conflict_count=1,
            used_opus=True,
            merge_latency_ms=42.5,
            success=False,
        )
    )

    assert d["session_id"] == "sess-XYZ"
    assert d["delta_count"] == 5
    assert d["conflict_count"] == 1
    assert d["used_opus"] is True
    assert abs(d["merge_latency_ms"] - 42.5) < 1e-9
    assert d["success"] is False
    json.dumps(d)  # no exotic types: the dict must round-trip through JSON


# ===========================================================================
# WB Tests — White Box
# ===========================================================================


def test_wb1_redis_incr_used_for_counters(tmp_path: Path):
    """
    WB1: record() must call redis.incr() with the correct counter keys.
         - Always increments :total
         - Increments :conflicts when conflict_count > 0
         - Increments :opus_calls when used_opus is True
    """
    redis_mock = MagicMock()
    redis_mock.get.return_value = b"0"
    redis_mock.lrange.return_value = []

    telemetry = MergeTelemetry(
        redis_client=redis_mock,
        events_path=str(tmp_path / "events.jsonl"),
    )

    # A record that both conflicts and escalates to Opus.
    telemetry.record(make_record(conflict_count=2, used_opus=True, merge_latency_ms=5.0))

    keys_incremented = {c.args[0] for c in redis_mock.incr.call_args_list}
    assert _KEY_TOTAL in keys_incremented, "incr(:total) must be called"
    assert _KEY_CONFLICTS in keys_incremented, "incr(:conflicts) must be called when conflicts > 0"
    assert _KEY_OPUS_CALLS in keys_incremented, "incr(:opus_calls) must be called when used_opus=True"


def test_wb1b_redis_no_conflict_incr(tmp_path: Path):
    """WB1b: When conflict_count=0, :conflicts counter must NOT be incremented."""
    redis_mock = MagicMock()
    redis_mock.get.return_value = b"0"
    redis_mock.lrange.return_value = []

    telemetry = MergeTelemetry(
        redis_client=redis_mock,
        events_path=str(tmp_path / "events.jsonl"),
    )

    telemetry.record(make_record(conflict_count=0, used_opus=False))

    keys_incremented = {c.args[0] for c in redis_mock.incr.call_args_list}
    assert _KEY_TOTAL in keys_incremented
    assert _KEY_CONFLICTS not in keys_incremented, ":conflicts must NOT be incremented when conflict_count=0"
    assert _KEY_OPUS_CALLS not in keys_incremented, ":opus_calls must NOT be incremented when used_opus=False"


def test_wb2_events_written_to_jsonl(tmp_path: Path):
    """
    WB2: record() must append a valid JSON line to events.jsonl
         containing every MergeRecord field.
    """
    events_file = tmp_path / "events.jsonl"
    telemetry = MergeTelemetry(redis_client=None, events_path=str(events_file))

    telemetry.record(
        make_record(
            session_id="sess-write-test",
            delta_count=3,
            conflict_count=1,
            used_opus=True,
            merge_latency_ms=77.7,
            success=True,
        )
    )

    assert events_file.exists(), "events.jsonl must be created by record()"

    lines = events_file.read_text(encoding="utf-8").strip().splitlines()
    assert len(lines) == 1, "Exactly one JSON line must have been written"

    parsed = json.loads(lines[0])
    assert parsed["session_id"] == "sess-write-test"
    assert parsed["delta_count"] == 3
    assert parsed["conflict_count"] == 1
    assert parsed["used_opus"] is True
    assert abs(parsed["merge_latency_ms"] - 77.7) < 1e-6
    assert parsed["success"] is True


def test_wb2_multiple_events_appended(tmp_path: Path):
    """WB2 extension: Multiple record() calls each append one line to events.jsonl."""
    events_file = tmp_path / "multi_events.jsonl"
    telemetry = MergeTelemetry(redis_client=None, events_path=str(events_file))

    expected_ids = [f"sess-{i}" for i in range(5)]
    for idx, sid in enumerate(expected_ids):
        telemetry.record(make_record(session_id=sid, merge_latency_ms=float(idx)))

    lines = events_file.read_text(encoding="utf-8").strip().splitlines()
    assert len(lines) == 5
    assert [json.loads(ln)["session_id"] for ln in lines] == expected_ids


def test_wb3_division_by_zero_safe():
    """WB3: get_stats() on a zero-total telemetry returns 0.0 rates (no ZeroDivisionError)."""
    # Calling get_stats() itself must not raise.
    stats = MergeTelemetry(redis_client=None).get_stats()

    assert stats["total_merges"] == 0
    for key in ("conflict_rate_pct", "opus_rate_pct", "avg_latency_ms"):
        assert stats[key] == 0.0


def test_wb3_redis_zero_total():
    """WB3 (Redis path): When Redis reports total=0, all rates must be 0.0."""
    telemetry = MergeTelemetry(
        redis_client=make_redis_mock(total=0, conflicts=0, opus_calls=0, latencies=[]),
        events_path="/dev/null",
    )

    stats = telemetry.get_stats()

    assert stats["total_merges"] == 0
    for key in ("conflict_rate_pct", "opus_rate_pct", "avg_latency_ms"):
        assert stats[key] == 0.0


def test_wb4_redis_rpush_used_for_latencies(tmp_path: Path):
    """
    WB4: record() must append the latency to one shared Redis list via
         rpush() — not write individual per-record keys.
    """
    redis_mock = MagicMock()
    redis_mock.get.return_value = b"0"
    redis_mock.lrange.return_value = []

    telemetry = MergeTelemetry(
        redis_client=redis_mock,
        events_path=str(tmp_path / "events.jsonl"),
    )

    telemetry.record(make_record(merge_latency_ms=99.9))

    calls = redis_mock.rpush.call_args_list
    assert len(calls) == 1, "Exactly one rpush call per record() is expected"

    key_used = calls[0].args[0]
    value_pushed = calls[0].args[1]

    assert key_used == _KEY_LATENCIES, (
        f"Expected rpush to {_KEY_LATENCIES!r}, got {key_used!r}"
    )
    assert value_pushed == "99.9", (
        f"Expected latency value '99.9', got {value_pushed!r}"
    )


# ===========================================================================
# Package import test
# ===========================================================================


def test_pkg_core_merge_exports():
    """PKG: core.merge __init__.py re-exports MergeTelemetry and MergeRecord."""
    from core.merge import MergeRecord as exported_record  # noqa: F401
    from core.merge import MergeTelemetry as exported_telemetry  # noqa: F401

    # The re-exports must be the very same objects as the module-level ones.
    assert exported_telemetry is MergeTelemetry
    assert exported_record is MergeRecord
