#!/usr/bin/env python3
"""
Jules Pro 24/7 Task Feeder
===========================
Continuously submits coding tasks to Google Jules (jules.google.com)
via GitHub Issues on the genesis-system repo.

Account: sunvision07@gmail.com (Jules Pro)
GitHub repo: Kinan27/genesis-system
Jules limit: 15 concurrent tasks, 100/day

Usage:
    python3 scripts/jules_24_7_feeder.py --submit-batch
    python3 scripts/jules_24_7_feeder.py --generate 5
    python3 scripts/jules_24_7_feeder.py --loop
    python3 scripts/jules_24_7_feeder.py --status
"""

import os
import sys
import json
import re
import time
import logging
import argparse
import subprocess
import datetime
from pathlib import Path
from typing import Optional

# ─── PATHS (E: DRIVE ONLY) ────────────────────────────────────────────────────
REPO_ROOT         = Path("/mnt/e/genesis-system")
BATCH_FILE        = REPO_ROOT / "hive" / "JULES_TASK_BATCH_MAX_COMPUTE.md"
TRACKER_FILE      = REPO_ROOT / "data" / "jules_task_tracker.json"
LOG_FILE          = REPO_ROOT / "data" / "logs" / "jules_feeder.log"
ISSUE_QUEUE_DIR   = REPO_ROOT / "data" / "jules_submitted"
GEMINI_BIN        = "/home/authentic88/.nvm/versions/node/v22.22.0/bin/gemini"

# ─── GITHUB CONFIG ────────────────────────────────────────────────────────────
GITHUB_REPO       = "Kinan27/genesis-system"
# SECURITY: prefer the GITHUB_PAT environment variable. The hard-coded fallback
# below is kept only for backward compatibility — this token is committed to
# source control and should be rotated and removed.
GITHUB_PAT        = os.environ.get(
    "GITHUB_PAT",
    "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",
)
JULES_LABEL       = "jules"       # label Jules watches for on issues
AUTO_LABEL        = "jules-auto"  # marks tasks generated by Gemini, not the batch file
JULES_DAILY_LIMIT = 100           # Jules Pro: max 100 task submissions per day
LOOP_INTERVAL_MIN = 15            # minutes between 24/7 loop cycles
MAX_CONCURRENT    = 15            # Jules Pro: max concurrent tasks

# ─── GEMINI CONFIG ────────────────────────────────────────────────────────────
# SECURITY: the hard-coded API-key fallback should be removed once the
# GEMINI_API_KEY environment variable is set everywhere this script runs.
GEMINI_API_KEY    = os.environ.get("GEMINI_API_KEY", "AIzaSyALfbAdHfJ6aRnqNyiTRmKmGVoena1JsdU")
GEMINI_MODEL      = "gemini-2.0-flash"

# ─── LOGGING ──────────────────────────────────────────────────────────────────
# Log to both the E:-drive log file and stdout (for tmux visibility).
LOG_FILE.parent.mkdir(parents=True, exist_ok=True)
_log_handlers = [
    logging.FileHandler(str(LOG_FILE)),
    logging.StreamHandler(sys.stdout),
]
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=_log_handlers,
)
log = logging.getLogger("jules_feeder")


# ══════════════════════════════════════════════════════════════════════════════
# TRACKER
# ══════════════════════════════════════════════════════════════════════════════

def load_tracker() -> dict:
    """Load the task tracker from disk, creating a fresh one if needed.

    A missing file yields a brand-new tracker structure; a corrupted file
    is logged and replaced rather than raising.
    """
    TRACKER_FILE.parent.mkdir(parents=True, exist_ok=True)
    if TRACKER_FILE.exists():
        try:
            return json.loads(TRACKER_FILE.read_text())
        except json.JSONDecodeError:
            log.warning("Tracker corrupted — resetting.")
    # Fresh tracker: lifetime meta, per-day counters, and per-task records.
    fresh = {
        "meta": {
            "created": datetime.date.today().isoformat(),
            "repo": GITHUB_REPO,
            "total_submitted": 0,
            "total_completed": 0,
        },
        "daily": {},
        "tasks": {},
    }
    return fresh


def save_tracker(tracker: dict) -> None:
    """Persist *tracker* to TRACKER_FILE as pretty-printed JSON."""
    serialized = json.dumps(tracker, indent=2)
    TRACKER_FILE.write_text(serialized)


def today_key() -> str:
    """Return today's date as an ISO-8601 string (key into tracker['daily'])."""
    return datetime.date.today().isoformat()


def today_count(tracker: dict) -> int:
    """Number of tasks submitted so far today (0 if none recorded)."""
    return tracker["daily"].get(today_key(), 0)


def increment_daily(tracker: dict) -> None:
    """Bump today's submission counter and the lifetime total, in place."""
    key = today_key()
    tracker["daily"][key] = 1 + tracker["daily"].get(key, 0)
    meta = tracker["meta"]
    meta["total_submitted"] = 1 + meta.get("total_submitted", 0)


def can_submit_today(tracker: dict) -> bool:
    """True while today's submissions remain under JULES_DAILY_LIMIT."""
    return today_count(tracker) < JULES_DAILY_LIMIT


# ══════════════════════════════════════════════════════════════════════════════
# GITHUB ISSUES API  (with graceful degradation)
# ══════════════════════════════════════════════════════════════════════════════

def _github_headers() -> dict:
    """Standard headers for authenticated GitHub REST API requests."""
    headers = {
        "Authorization": f"token {GITHUB_PAT}",
        "Accept": "application/vnd.github+json",
        "Content-Type": "application/json",
    }
    return headers


def _ensure_label(label: str) -> None:
    """Best-effort creation of a GitHub label.

    Every failure — label already exists, PAT lacks permission, network
    error — is deliberately swallowed: labels are nice-to-have only.
    """
    try:
        import urllib.request
        endpoint = f"https://api.github.com/repos/{GITHUB_REPO}/labels"
        payload = json.dumps({"name": label, "color": "0075ca"}).encode()
        request = urllib.request.Request(
            endpoint, data=payload, headers=_github_headers(), method="POST"
        )
        urllib.request.urlopen(request, timeout=10)
    except Exception:
        pass  # Label exists or no permission — both are fine


def create_github_issue(title: str, body: str, labels: list[str]) -> Optional[str]:
    """
    Create a GitHub issue titled "[Jules] <title>".

    Args:
        title:  Issue title ("[Jules] " prefix is added automatically).
        body:   Markdown issue body.
        labels: Labels to attach.

    Returns:
        The issue's html_url on success, or None on any failure so the
        caller can fall back to local-file submission.
    """
    import urllib.request, urllib.error
    url = f"https://api.github.com/repos/{GITHUB_REPO}/issues"
    payload = {
        "title": f"[Jules] {title}",
        "body": body,
        "labels": labels,
    }
    data = json.dumps(payload).encode()
    req = urllib.request.Request(url, data=data, headers=_github_headers(), method="POST")
    try:
        with urllib.request.urlopen(req, timeout=15) as resp:
            result = json.loads(resp.read())
            url_out = result.get("html_url", "")
            log.info(f"  GitHub issue created: {url_out}")
            return url_out
    except urllib.error.HTTPError as e:
        body_text = e.read().decode()
        # FIX: e.code is an int — compare it directly instead of doing a
        # substring test on str(e.code). 403 (or the explicit "Resource not
        # accessible" message) means the PAT lacks the 'issues' scope.
        if e.code == 403 or "Resource not accessible" in body_text:
            log.warning("  GitHub issues API: PAT lacks 'issues' scope. "
                        "Falling back to local file submission.")
        else:
            log.error(f"  GitHub API error {e.code}: {body_text[:200]}")
        return None
    except Exception as exc:
        log.error(f"  GitHub API exception: {exc}")
        return None


def get_open_jules_issues() -> list[dict]:
    """Return open Jules-labelled issues from GitHub.

    Returns [] on any failure (missing PAT scope, network error, or an
    unexpected response shape) so callers can treat the result uniformly.
    Failures are logged at debug level instead of being silently dropped.
    """
    import urllib.request, urllib.error
    url = f"https://api.github.com/repos/{GITHUB_REPO}/issues?state=open&labels={JULES_LABEL}&per_page=100"
    req = urllib.request.Request(url, headers=_github_headers())
    try:
        with urllib.request.urlopen(req, timeout=15) as resp:
            issues = json.loads(resp.read())
            if isinstance(issues, list):
                return issues
    except Exception as exc:
        # Best-effort check: keep the graceful [] return, but record why.
        log.debug(f"  Could not list open Jules issues: {exc}")
    return []


# ══════════════════════════════════════════════════════════════════════════════
# FALLBACK: PUSH ISSUE FILE TO REPO
# ══════════════════════════════════════════════════════════════════════════════

def save_issue_locally(task_id: str, title: str, body: str, labels: list[str]) -> str:
    """
    Fallback submission path used when the GitHub API lacks permission.

    Writes the issue as a markdown file under data/jules_submitted/ so it
    can be batch-pasted to jules.google.com or picked up by browser
    automation. Returns a "local:<path>" pseudo-URL.
    """
    ISSUE_QUEUE_DIR.mkdir(parents=True, exist_ok=True)
    # Sanitize the task id into a filesystem-safe slug.
    slug = re.sub(r"[^a-z0-9_-]", "_", task_id.lower())
    target = ISSUE_QUEUE_DIR / f"{slug}.md"
    markdown = f"""# [Jules] {title}

**Task ID**: {task_id}
**Labels**: {', '.join(labels)}
**Created**: {datetime.datetime.now().isoformat()}
**Repo**: {GITHUB_REPO}

---

{body}
"""
    target.write_text(markdown)
    log.info(f"  Saved locally: {target}")
    return f"local:{target}"


# ══════════════════════════════════════════════════════════════════════════════
# BATCH FILE PARSER
# ══════════════════════════════════════════════════════════════════════════════

def parse_batch_file(filepath: Path) -> list[dict]:
    """
    Read the batch markdown file and return a list of parsed task dicts.

    Expected per-task layout in the file:
        ### Task N: Title
        **Title**: ...
        **Description**: ...
        **Files to read**: ...
        **Acceptance Criteria**: ...
    """
    if not filepath.exists():
        log.error(f"Batch file not found: {filepath}")
        return []

    raw = filepath.read_text()

    # Lookahead split keeps each "### Task N:" heading with its own section.
    sections = re.split(r"(?=---\n\n### Task \d+:)", raw)

    # sections[0] is the preamble before the first task — skip it.
    parsed = [_parse_task_section(chunk) for chunk in sections[1:]]
    tasks = [t for t in parsed if t]

    log.info(f"Parsed {len(tasks)} tasks from {filepath.name}")
    return tasks


def _parse_task_section(section: str) -> Optional[dict]:
    """Parse a single task section from the batch MD file.

    Returns a normalized task dict, or None when the section has no
    "### Task N: ..." heading (e.g. stray non-task text after a split).
    """
    # Extract task number and title from heading
    heading_match = re.search(r"### Task (\d+): (.+)", section)
    if not heading_match:
        return None

    num = int(heading_match.group(1))
    heading_title = heading_match.group(2).strip()

    # Extract **Title** field (falls back to the heading title if absent)
    title_match = re.search(r"\*\*Title\*\*:\s*(.+)", section)
    title = title_match.group(1).strip() if title_match else heading_title

    # Extract **Description** block — everything up to the next known field
    # marker, a "---" divider, or end of section (non-greedy + lookahead)
    desc_match = re.search(
        r"\*\*Description\*\*:\s*\n(.*?)(?=\n\*\*Files to read\*\*|\n\*\*Acceptance|\n---|\Z)",
        section, re.DOTALL
    )
    description = desc_match.group(1).strip() if desc_match else ""

    # Extract **Files to read** (same delimiting strategy as above)
    files_match = re.search(
        r"\*\*Files to read\*\*:\s*\n(.*?)(?=\n\*\*Acceptance|\n\*\*Labels|\n---|\Z)",
        section, re.DOTALL
    )
    files_to_read = files_match.group(1).strip() if files_match else ""

    # Extract **Acceptance Criteria**
    accept_match = re.search(
        r"\*\*Acceptance Criteria\*\*:\s*\n(.*?)(?=\n\*\*Labels|\n\*\*Wave|\n---|\Z)",
        section, re.DOTALL
    )
    acceptance = accept_match.group(1).strip() if accept_match else ""

    # Extract **Labels** (if present) — comma-separated, blanks dropped
    labels_match = re.search(r"\*\*Labels\*\*:\s*(.+)", section)
    extra_labels = []
    if labels_match:
        extra_labels = [l.strip() for l in labels_match.group(1).split(",") if l.strip()]

    # Determine wave: tasks 1-15 are wave 1, everything after is wave 2
    wave = 1 if num <= 15 else 2

    return {
        "id": f"JULES-BATCH-{num:03d}",
        "num": num,
        "wave": wave,
        "title": title,
        "description": description,
        "files_to_read": files_to_read,
        "acceptance": acceptance,
        "labels": [JULES_LABEL, f"wave-{wave}"] + extra_labels,
        "status": "pending",
    }


def format_issue_body(task: dict) -> str:
    """Render a task dict as a Jules-ready GitHub issue body (markdown)."""
    lines: list[str] = [
        f"## Jules Task: {task['title']}",
        "",
        "### Context",
        "This task was auto-generated by the Genesis Jules 24/7 Feeder.",
        f"**Wave**: {task['wave']} | **Task ID**: {task['id']}",
        "",
        "### Task Description",
        task["description"] or "_No description provided._",
        "",
    ]

    reads = task.get("files_to_read", "")
    if reads:
        # Accept either a preformatted string or a list of paths.
        if isinstance(reads, list):
            reads = "\n".join(f"- {f}" for f in reads)
        lines.extend(["### Files to Read", str(reads), ""])

    criteria = task.get("acceptance", "")
    if criteria:
        # A list becomes a numbered acceptance-criteria block.
        if isinstance(criteria, list):
            criteria = "\n".join(f"{i+1}. {a}" for i, a in enumerate(criteria))
        lines.extend(["### Acceptance Criteria", str(criteria), ""])

    # Mandatory footer: Genesis ground rules and no-touch list.
    lines.extend([
        "---",
        "### Genesis Rules (MANDATORY for Jules)",
        "- Use **E: drive paths** only — never C: drive",
        "- Use **PostgreSQL** via `elestio_config.PostgresConfig` — never SQLite",
        "- Add **VERIFICATION_STAMP** comment to every completed file",
        "- Write **black box + white box tests** for every story",
        "- All tests must pass before marking complete",
        "",
        "### Do NOT modify",
        "- Any AIVA-related files",
        "- `.env` or credential files",
        "- `core/genesis_execution_layer.py` (without explicit approval)",
        "",
        f"_Auto-submitted by Jules 24/7 Feeder at {datetime.datetime.now().isoformat()}_",
    ])

    return "\n".join(lines)


# ══════════════════════════════════════════════════════════════════════════════
# TASK GENERATION (Gemini Flash)
# ══════════════════════════════════════════════════════════════════════════════

def generate_tasks_with_gemini(n: int) -> list[dict]:
    """
    Use Gemini Flash API to generate N new coding tasks based on
    Genesis codebase priorities from the Knowledge Graph.

    Args:
        n: Number of tasks to request from the model.

    Returns:
        A list of task dicts parsed from the model's JSON output, or []
        on any API or parse failure (callers retry on the next cycle).
    """
    log.info(f"Generating {n} new tasks using Gemini Flash...")

    # Build context from KG priorities
    kg_context = _load_kg_context()

    prompt = f"""You are a senior software architect for Genesis, an autonomous AI orchestration system.

Based on these Genesis codebase priorities and gaps:

{kg_context}

Generate {n} specific, atomic coding tasks formatted as JSON array.

Each task must have these fields:
- id: "JULES-GEN-NNN" (sequential)
- title: concise title (under 80 chars)
- description: detailed description with numbered steps (what to build, how)
- files_to_read: list of existing files to reference
- acceptance: numbered acceptance criteria (3-5 items)
- labels: list of relevant labels (include "jules")
- wave: 1 (all generated tasks are wave 1)

RULES for tasks:
- Each task = one file, one class, one concern
- Must use PostgreSQL via elestio_config (NEVER SQLite)
- Must use E: drive paths (/mnt/e/genesis-system/)
- Must include test requirements
- Must be completable by Jules in <2 hours
- Focus on: memory pipeline, testing coverage, API integrations, voice widget, tradie scraping

Return ONLY valid JSON array, no markdown wrapper."""

    import urllib.request
    url = f"https://generativelanguage.googleapis.com/v1beta/models/{GEMINI_MODEL}:generateContent?key={GEMINI_API_KEY}"
    payload = {
        "contents": [{"parts": [{"text": prompt}]}],
        "generationConfig": {"temperature": 0.7, "maxOutputTokens": 8192},
    }
    data = json.dumps(payload).encode()
    req = urllib.request.Request(url, data=data, headers={"Content-Type": "application/json"})

    try:
        with urllib.request.urlopen(req, timeout=60) as resp:
            result = json.loads(resp.read())
            # Gemini response shape: candidates[0].content.parts[0].text
            text = result["candidates"][0]["content"]["parts"][0]["text"]
            # Strip markdown code fences if present
            text = re.sub(r"```json\s*", "", text)
            text = re.sub(r"```\s*", "", text)
            tasks = json.loads(text.strip())
            log.info(f"Gemini generated {len(tasks)} tasks")
            return tasks
    except json.JSONDecodeError as e:
        log.error(f"Gemini response not valid JSON: {e}")
        return []
    except Exception as e:
        log.error(f"Gemini API error: {e}")
        return []


def _load_kg_context() -> str:
    """Assemble a short Genesis-priorities summary from the Knowledge Graph.

    Pulls up to 3 gap documents (800 chars each) and the first 10 lines of
    the 2 newest failure-axiom logs. Falls back to a static priority list
    when the KG directories are missing or unreadable.
    """
    snippets: list[str] = []

    # Gap documents.
    gaps_dir = REPO_ROOT / "KNOWLEDGE_GRAPH" / "gaps"
    if gaps_dir.exists():
        for path in list(gaps_dir.glob("*.md"))[:3]:
            try:
                snippets.append(f"=== {path.name} ===\n{path.read_text()[:800]}")
            except Exception:
                pass

    # Recent failure axioms provide hints for new tasks.
    axioms_dir = REPO_ROOT / "KNOWLEDGE_GRAPH" / "axioms"
    if axioms_dir.exists():
        newest_first = sorted(axioms_dir.glob("failure_axioms_*.jsonl"), reverse=True)
        for path in newest_first[:2]:
            try:
                head = path.read_text().splitlines()[:10]
                snippets.append(f"=== {path.name} ===\n" + "\n".join(head))
            except Exception:
                pass

    if not snippets:
        snippets = [
            "Priority: Memory bloodstream pipeline testing and verification",
            "Priority: Browser agent automation framework",
            "Priority: GHL workflow automation tests",
            "Priority: Tradie lead scraper CLI",
            "Priority: Voice bridge production hardening",
        ]

    return "\n\n".join(snippets)


# ══════════════════════════════════════════════════════════════════════════════
# SUBMISSION ENGINE
# ══════════════════════════════════════════════════════════════════════════════

def submit_task(task: dict, tracker: dict, dry_run: bool = False) -> bool:
    """
    Submit one task to Jules via a GitHub issue (or local-file fallback).

    Records the submission in *tracker* and returns True on success
    (a dry run counts as success). Returns False when the task was
    already submitted/completed or the daily budget is exhausted.
    """
    task_id = task["id"]
    title   = task["title"]
    labels  = task.get("labels", [JULES_LABEL])

    # Idempotency: never resubmit something already in flight or done.
    previous = tracker["tasks"].get(task_id)
    if previous and previous.get("status") in ("submitted", "completed"):
        log.debug(f"  Skipping {task_id}: already {previous['status']}")
        return False

    if not can_submit_today(tracker):
        log.warning(f"Daily limit reached ({JULES_DAILY_LIMIT}/day). Stopping.")
        return False

    log.info(f"Submitting: [{task_id}] {title}")

    if dry_run:
        log.info(f"  [DRY RUN] Would submit: {title}")
        return True

    body = format_issue_body(task)

    # Preferred path: GitHub Issues API. Fallback: local markdown file
    # for manual upload / browser automation.
    issue_url = create_github_issue(title, body, labels)
    if issue_url:
        submission_mode = "github_issue"
    else:
        issue_url = save_issue_locally(task_id, title, body, labels)
        submission_mode = "local_file"

    # Record in tracker so future runs skip this task.
    tracker["tasks"][task_id] = {
        "id": task_id,
        "title": title,
        "status": "submitted",
        "submitted_at": datetime.datetime.now().isoformat(),
        "submitted_on": today_key(),
        "url": issue_url,
        "mode": submission_mode,
        "wave": task.get("wave", 1),
        "labels": labels,
    }
    increment_daily(tracker)

    log.info(f"  Submitted via {submission_mode}: {issue_url}")
    return True


def submit_batch(tasks: list[dict], tracker: dict, dry_run: bool = False) -> int:
    """Submit every task in *tasks*, respecting the daily budget.

    Sleeps 2 seconds between submissions to stay under GitHub's secondary
    rate limits, persists the tracker when done, and returns the number
    of tasks actually submitted.
    """
    submitted = 0
    skipped   = 0

    log.info(f"Starting batch submission of {len(tasks)} tasks...")
    log.info(f"Daily budget remaining: {JULES_DAILY_LIMIT - today_count(tracker)}")

    for task in tasks:
        if not can_submit_today(tracker):
            log.warning(f"Daily limit reached. Submitted {submitted}, skipped {skipped}.")
            break

        # Rate limit: 1 issue per 2 seconds to avoid GitHub secondary rate limits
        if submitted:
            time.sleep(2)

        if submit_task(task, tracker, dry_run=dry_run):
            submitted += 1
        else:
            skipped += 1

    save_tracker(tracker)
    log.info(f"Batch complete. Submitted: {submitted}, Skipped: {skipped}")
    return submitted


# ══════════════════════════════════════════════════════════════════════════════
# STATUS DISPLAY
# ══════════════════════════════════════════════════════════════════════════════

def show_status(tracker: dict) -> None:
    """Print a clean status report.

    Shows lifetime/daily totals, tasks grouped by status (last 5 each),
    a 7-day submission histogram, and any locally-queued issue files
    still awaiting manual upload.
    """
    meta  = tracker.get("meta", {})
    tasks = tracker.get("tasks", {})
    daily = tracker.get("daily", {})

    print("\n" + "=" * 60)
    print("JULES 24/7 FEEDER — STATUS")
    print("=" * 60)
    print(f"Repo:            {GITHUB_REPO}")
    print(f"Total submitted: {meta.get('total_submitted', 0)}")
    print(f"Total completed: {meta.get('total_completed', 0)}")
    print(f"Today ({today_key()}): {today_count(tracker)}/{JULES_DAILY_LIMIT}")
    print()

    # Group by status
    by_status: dict[str, list] = {}
    for t in tasks.values():
        s = t.get("status", "unknown")
        by_status.setdefault(s, []).append(t)

    for status, items in sorted(by_status.items()):
        print(f"[{status.upper()}] {len(items)} tasks:")
        for t in items[-5:]:  # show last 5 per status
            mode = t.get("mode", "")
            print(f"  {t['id']:25s} {t['title'][:45]} [{mode}]")
        if len(items) > 5:
            print(f"  ... and {len(items) - 5} more")
        print()

    # Daily submission history (one '#' per submission)
    if daily:
        print("Daily history (last 7 days):")
        for date in sorted(daily.keys())[-7:]:
            bar = "#" * daily[date]
            print(f"  {date}: {bar} ({daily[date]})")
    print("=" * 60)

    # Show pending local files (fallback submissions not yet uploaded)
    if ISSUE_QUEUE_DIR.exists():
        local_files = list(ISSUE_QUEUE_DIR.glob("*.md"))
        if local_files:
            print(f"\nLocal issue files (need manual upload or browser agent):")
            for f in local_files:
                print(f"  {f.name}")
            print(f"  Location: {ISSUE_QUEUE_DIR}")


# ══════════════════════════════════════════════════════════════════════════════
# LOOP MODE — 24/7 CONTINUOUS OPERATION
# ══════════════════════════════════════════════════════════════════════════════

def run_loop(tracker: dict, dry_run: bool = False) -> None:
    """
    24/7 loop mode:
    1. Check batch file for unsubmitted tasks → submit them
    2. When batch exhausted → generate new tasks via Gemini Flash
    3. Sleep LOOP_INTERVAL_MIN minutes
    4. Repeat indefinitely

    Args:
        tracker: Submission tracker; reloaded from disk after every sleep
                 so external updates are picked up.
        dry_run: When True, log what would be submitted without creating
                 any issues.
    """
    log.info(f"Starting 24/7 loop (interval: {LOOP_INTERVAL_MIN} min)...")
    log.info("Press Ctrl+C to stop.")

    cycle = 0
    while True:
        cycle += 1
        log.info(f"\n{'─' * 50}")
        log.info(f"Loop cycle {cycle} — {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        log.info(f"Daily budget: {today_count(tracker)}/{JULES_DAILY_LIMIT}")

        if not can_submit_today(tracker):
            # Budget exhausted — sleep until just past local midnight.
            next_day = datetime.datetime.now().replace(
                hour=0, minute=0, second=0, microsecond=0
            ) + datetime.timedelta(days=1)
            # FIX: use total_seconds() instead of timedelta.seconds — .seconds
            # is only the sub-day component and would silently drop whole days.
            sleep_secs = int((next_day - datetime.datetime.now()).total_seconds()) + 60
            log.info(f"Daily limit reached. Sleeping until {next_day.strftime('%Y-%m-%d %H:%M')} "
                     f"({sleep_secs // 3600}h {(sleep_secs % 3600) // 60}m)...")
            time.sleep(sleep_secs)
            tracker = load_tracker()
            continue

        # Step 1: Submit pending batch tasks (up to 5 per cycle)
        batch_tasks = parse_batch_file(BATCH_FILE)
        submitted_ids = set(tracker["tasks"].keys())
        pending = [t for t in batch_tasks if t["id"] not in submitted_ids]

        if pending:
            log.info(f"Found {len(pending)} unsubmitted batch tasks. Submitting up to 5 per cycle...")
            to_submit = pending[:5]  # max 5 per cycle to avoid rate limits
            for task in to_submit:
                if not can_submit_today(tracker):
                    break
                submit_task(task, tracker, dry_run=dry_run)
                time.sleep(2)
            save_tracker(tracker)
        else:
            # Step 2: Batch exhausted → generate new tasks via Gemini
            log.info("Batch tasks all submitted. Generating new tasks via Gemini Flash...")
            new_tasks = generate_tasks_with_gemini(5)
            if new_tasks:
                for task in new_tasks:
                    if not can_submit_today(tracker):
                        break
                    # Normalize generated task format (Gemini output may
                    # omit id/labels/wave)
                    if "id" not in task:
                        task["id"] = f"JULES-GEN-{int(time.time())}"
                    if "labels" not in task:
                        task["labels"] = [JULES_LABEL, AUTO_LABEL]
                    if "wave" not in task:
                        task["wave"] = 1
                    submit_task(task, tracker, dry_run=dry_run)
                    time.sleep(2)
                save_tracker(tracker)
            else:
                log.warning("Gemini task generation failed. Will retry next cycle.")

        # Step 3: Check status of open Jules issues (informational only)
        open_issues = get_open_jules_issues()
        if open_issues:
            log.info(f"Open Jules issues on GitHub: {len(open_issues)}")

        # Sleep until next cycle
        sleep_secs = LOOP_INTERVAL_MIN * 60
        log.info(f"Cycle complete. Sleeping {LOOP_INTERVAL_MIN} minutes...")
        time.sleep(sleep_secs)
        tracker = load_tracker()  # reload in case external updates


# ══════════════════════════════════════════════════════════════════════════════
# CLI ENTRY POINT
# ══════════════════════════════════════════════════════════════════════════════

def main() -> None:
    """CLI entry point: parse flags and dispatch to the chosen mode(s).

    Multiple action flags may be combined (e.g. --status --submit-batch);
    with no action flag the help text is printed.
    """
    parser = argparse.ArgumentParser(
        description="Jules Pro 24/7 Task Feeder — Genesis System",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  Submit all 30 batch tasks immediately:
    python3 scripts/jules_24_7_feeder.py --submit-batch

  Generate 5 new tasks via Gemini Flash and submit:
    python3 scripts/jules_24_7_feeder.py --generate 5

  Start 24/7 loop (for tmux session):
    python3 scripts/jules_24_7_feeder.py --loop

  Check submission status:
    python3 scripts/jules_24_7_feeder.py --status

  Dry run (preview without submitting):
    python3 scripts/jules_24_7_feeder.py --submit-batch --dry-run
        """,
    )

    parser.add_argument(
        "--submit-batch",
        action="store_true",
        help=f"Submit all tasks from {BATCH_FILE.name}",
    )
    parser.add_argument(
        "--generate",
        type=int,
        metavar="N",
        help="Generate N new tasks via Gemini Flash and submit them",
    )
    parser.add_argument(
        "--loop",
        action="store_true",
        help=f"Run 24/7 loop (checks every {LOOP_INTERVAL_MIN} min)",
    )
    parser.add_argument(
        "--status",
        action="store_true",
        help="Show submission status",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Preview actions without actually submitting",
    )
    parser.add_argument(
        "--wave",
        type=int,
        choices=[1, 2],
        help="Filter to only submit tasks from a specific wave (1 or 2)",
    )

    args = parser.parse_args()

    # No action flag given → show help and exit cleanly.
    # NOTE(review): `--generate 0` is falsy and also lands here — confirm
    # that treating 0 as "no action" is intended.
    if not any([args.submit_batch, args.generate, args.loop, args.status]):
        parser.print_help()
        sys.exit(0)

    tracker = load_tracker()

    # Ensure labels exist (best-effort; failures are silently ignored)
    _ensure_label(JULES_LABEL)
    _ensure_label(AUTO_LABEL)
    _ensure_label("wave-1")
    _ensure_label("wave-2")

    if args.status:
        show_status(tracker)

    if args.submit_batch:
        if args.dry_run:
            log.info("[DRY RUN] No issues will actually be created.")
        tasks = parse_batch_file(BATCH_FILE)
        # Optional --wave filter narrows the batch before submission.
        if args.wave:
            tasks = [t for t in tasks if t.get("wave") == args.wave]
            log.info(f"Filtered to wave {args.wave}: {len(tasks)} tasks")
        submit_batch(tasks, tracker, dry_run=args.dry_run)

    if args.generate:
        n = args.generate
        if args.dry_run:
            log.info(f"[DRY RUN] Would generate {n} tasks via Gemini Flash.")
        else:
            new_tasks = generate_tasks_with_gemini(n)
            log.info(f"Generated {len(new_tasks)} tasks. Submitting...")
            for task in new_tasks:
                if not can_submit_today(tracker):
                    log.warning("Daily limit reached during generation run.")
                    break
                # Normalize generated task format (same defaults as run_loop)
                if "id" not in task:
                    task["id"] = f"JULES-GEN-{int(time.time())}"
                if "labels" not in task:
                    task["labels"] = [JULES_LABEL, AUTO_LABEL]
                if "wave" not in task:
                    task["wave"] = 1
                submit_task(task, tracker, dry_run=False)
                time.sleep(2)
            save_tracker(tracker)
            log.info("Generation + submission complete.")

    if args.loop:
        try:
            run_loop(tracker, dry_run=args.dry_run)
        except KeyboardInterrupt:
            # Graceful shutdown: persist tracker state before exiting.
            log.info("\nLoop stopped by user.")
            save_tracker(tracker)


if __name__ == "__main__":
    main()
