#!/usr/bin/env python3
"""
QUEUE REFILLER - Keeps AIVA's task queue full for continuous development.

Monitors the queue and adds tasks from the PRD when it gets low.
"""

import os
import sys
import json
import redis
import hashlib
import logging
from datetime import datetime
from pathlib import Path

# Root-logger setup: INFO to stderr, every record tagged [REFILLER] so this
# process's output is distinguishable when interleaved with other Genesis logs.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [REFILLER] %(message)s'
)
logger = logging.getLogger()  # the root logger (the one basicConfig configured)

# Project root on the mounted drive. Not read anywhere in this file —
# presumably kept for consistency with sibling Genesis scripts (TODO confirm).
GENESIS_ROOT = Path("/mnt/e/genesis-system")

# All tasks for the 8-hour session
# Each entry is a task template: "id" is the stable base id (enqueue_task
# appends a time-based hash suffix), "description" is free text, and
# "priority" becomes the zset score in check_and_refill (lower = served
# first; enqueue_task defaults it to 2 when absent).
ALL_TASKS = [
    # Phase 1: Patent Mastery (already many queued, but adding more depth)
    {"id": "PM-021", "description": "Build unified patent knowledge index from all 9 patents", "priority": 2},
    {"id": "PM-022", "description": "Create patent-to-code mapping for existing Genesis files", "priority": 2},
    {"id": "PM-023", "description": "Generate patent claim verification checklist", "priority": 2},
    {"id": "PM-024", "description": "Build patent dependency graph visualization data", "priority": 2},
    {"id": "PM-025", "description": "Extract and index all technical terminology from patents", "priority": 2},

    # Phase 2: Triple Gate MVP
    {"id": "TG-001", "description": "Design Gate 1 schema for P1 (Cryptographic), P2 (Currency), P4 (Audit Trail)", "priority": 1},
    {"id": "TG-002", "description": "Design Gate 2 schema for P3 (Risk), P5 (Consensus), P6 (Confidence)", "priority": 1},
    {"id": "TG-003", "description": "Design Gate 3 schema for P7 (Hallucination), P8 (Privacy), P9 (Thresholds)", "priority": 1},
    {"id": "TG-004", "description": "Implement Gate 1 validator stub in /mnt/e/genesis-system/AIVA/validation_gate1.py", "priority": 1},
    {"id": "TG-005", "description": "Implement Gate 2 validator stub in /mnt/e/genesis-system/AIVA/validation_gate2.py", "priority": 1},
    {"id": "TG-006", "description": "Implement Gate 3 validator stub in /mnt/e/genesis-system/AIVA/validation_gate3.py", "priority": 1},
    {"id": "TG-007", "description": "Create validation pipeline orchestrator that chains Gate1 -> Gate2 -> Gate3", "priority": 1},
    {"id": "TG-008", "description": "Build test harness for triple gate validation", "priority": 2},
    {"id": "TG-009", "description": "Implement cryptographic hash verification function for Gate 1", "priority": 2},
    {"id": "TG-010", "description": "Implement timestamp currency validation for Gate 1", "priority": 2},
    {"id": "TG-011", "description": "Implement risk score calculator for Gate 2", "priority": 2},
    {"id": "TG-012", "description": "Implement audit trail logger for Gate 1", "priority": 2},
    {"id": "TG-013", "description": "Implement multi-model consensus checker for Gate 2", "priority": 2},
    {"id": "TG-014", "description": "Implement confidence scorer for Gate 2", "priority": 2},
    {"id": "TG-015", "description": "Implement hallucination detector for Gate 3", "priority": 2},
    {"id": "TG-016", "description": "Implement privacy anonymizer for Gate 3", "priority": 2},
    {"id": "TG-017", "description": "Implement adaptive threshold adjuster for Gate 3", "priority": 2},
    {"id": "TG-018", "description": "Create end-to-end pipeline test for full triple gate flow", "priority": 3},
    {"id": "TG-019", "description": "Build performance benchmark suite for validation latency", "priority": 3},
    {"id": "TG-020", "description": "Generate API documentation for triple gate system", "priority": 3},

    # Phase 3: System Hardening
    {"id": "SH-001", "description": "Add error recovery mechanisms to all three gates", "priority": 1},
    {"id": "SH-002", "description": "Implement circuit breaker pattern for external API calls", "priority": 1},
    {"id": "SH-003", "description": "Add metrics collection for validation performance", "priority": 2},
    {"id": "SH-004", "description": "Build health check endpoints for each gate", "priority": 2},
    {"id": "SH-005", "description": "Create admin dashboard data for validation monitoring", "priority": 2},
    {"id": "SH-006", "description": "Write integration tests for gate transitions", "priority": 2},
    {"id": "SH-007", "description": "Perform optimization pass on validation bottlenecks", "priority": 2},
    {"id": "SH-008", "description": "Security audit checklist for validation inputs", "priority": 2},
    {"id": "SH-009", "description": "Create deployment scripts for production gates", "priority": 3},
    {"id": "SH-010", "description": "Final documentation for triple gate API", "priority": 3},

    # Bonus: Knowledge Graph Enhancement
    {"id": "KG-001", "description": "Build entity relationships between all 9 patents", "priority": 2},
    {"id": "KG-002", "description": "Create patent claim cross-reference matrix", "priority": 2},
    {"id": "KG-003", "description": "Generate patent synergy analysis report", "priority": 2},
    {"id": "KG-004", "description": "Build prior art linkage graph", "priority": 2},
    {"id": "KG-005", "description": "Create patent-to-revenue pathway mapping", "priority": 2},
]


def get_task_hash(task_id: str) -> str:
    """Return a 12-hex-char MD5 digest of the task id plus the current epoch second.

    Two calls for the same id within the same second yield the same value;
    the digest is used only to disambiguate queue entries, not for security.
    """
    epoch_second = int(datetime.now().timestamp())
    digest = hashlib.md5(f"{task_id}_{epoch_second}".encode())
    return digest.hexdigest()[:12]


def enqueue_task(r: redis.Redis, task: dict) -> bool:
    """Serialize *task* and add it to the ``genesis:task_queue`` sorted set.

    The stored member is a JSON payload whose id carries a time-based hash
    suffix so repeated enqueues of the same base task stay distinct; the
    zset score is the task priority (lower score = earlier in the queue).

    Returns True on success, False if the Redis write failed.
    """
    priority = task.get("priority", 2)
    payload = {
        "id": f"{task['id']}_{get_task_hash(task['id'])}",
        "description": task["description"],
        "priority": priority,
        "created_at": datetime.now().isoformat(),
        "status": "pending",
    }

    try:
        # zadd maps member -> score; priority doubles as the score.
        r.zadd("genesis:task_queue", {json.dumps(payload): priority})
    except Exception as e:
        logger.error(f"Failed to enqueue {task['id']}: {e}")
        return False

    logger.info(f"Enqueued: {task['id']} - {task['description'][:50]}...")
    return True


def check_and_refill(threshold: int = 10, batch_size: int = 20):
    """Check queue depth and refill if below threshold.

    Connects to the Elestio Redis instance (connection details come from
    the GENESIS_REDIS_* environment variables, with hard-coded fallbacks),
    inspects the ``genesis:task_queue`` sorted set, and when fewer than
    *threshold* tasks are pending enqueues up to *batch_size* tasks from
    ALL_TASKS whose base ids are not already queued.

    Returns None; all outcomes are reported via the module logger.
    """
    try:
        # Use Elestio Redis (same as AIVA orchestrator)
        r = redis.Redis(
            host=os.environ.get("GENESIS_REDIS_HOST", "redis-genesis-u50607.vm.elestio.app"),
            port=int(os.environ.get("GENESIS_REDIS_PORT", 26379)),
            password=os.environ.get("GENESIS_REDIS_PASSWORD", ""),
            decode_responses=True
        )
        r.ping()
    except Exception as e:
        logger.error(f"Redis connection failed: {e}")
        return

    # Queue is a sorted set (zset)
    pending = r.zcard("genesis:task_queue")
    logger.info(f"Current queue depth: {pending}")

    if pending >= threshold:
        logger.info("Queue is healthy - no refill needed")
        return

    logger.info(f"Queue below threshold ({threshold}) - adding {batch_size} tasks")

    # Collect base ids already queued so we don't enqueue duplicates.
    # A set gives O(1) membership tests (the previous list scan was O(n)
    # per candidate task).
    queued = set()
    for task_json in r.zrange("genesis:task_queue", 0, -1):
        try:
            task = json.loads(task_json)
            # Members are stored as "<BASE-ID>_<hash>"; keep the base part.
            queued.add(task.get("id", "").split("_")[0])
        except (json.JSONDecodeError, AttributeError):
            # Malformed entry (bad JSON, or not a dict/str where expected):
            # skip it but say so. The old bare `except: pass` swallowed
            # everything, including KeyboardInterrupt/SystemExit.
            logger.warning(f"Skipping malformed queue entry: {task_json[:60]!r}")

    # Add tasks that aren't already queued, up to batch_size.
    added = 0
    for task in ALL_TASKS:
        if added >= batch_size:
            break
        if task["id"] not in queued:
            if enqueue_task(r, task):
                added += 1
                queued.add(task["id"])

    logger.info(f"Added {added} tasks to queue")


def main():
    """CLI entry point: run a single refill check, or loop forever with --loop."""
    import argparse
    parser = argparse.ArgumentParser(description="Queue Refiller")
    parser.add_argument("--threshold", type=int, default=10, help="Min queue depth")
    parser.add_argument("--batch", type=int, default=20, help="Tasks to add when low")
    parser.add_argument("--loop", action="store_true", help="Run continuously")
    parser.add_argument("--interval", type=int, default=300, help="Check interval in seconds")
    args = parser.parse_args()

    if not args.loop:
        check_and_refill(args.threshold, args.batch)
        return

    import time
    logger.info(f"Starting continuous refiller (interval: {args.interval}s)")
    try:
        while True:
            check_and_refill(args.threshold, args.batch)
            time.sleep(args.interval)
    except KeyboardInterrupt:
        # Exit cleanly on Ctrl-C instead of dumping a traceback — this is
        # the normal way to stop a long-running refiller.
        logger.info("Refiller stopped by user")


if __name__ == "__main__":
    main()
