#!/usr/bin/env python3
"""
Terminal Bridge - Routes to Claude API with proper OpenAI-compatible response format.
"""
import json
import os
import httpx
from datetime import datetime
from fastapi import FastAPI
from pydantic import BaseModel
from typing import List, Optional
import uvicorn
import time

app = FastAPI(title="Genesis Terminal Bridge")

# SECURITY: the API key must come exclusively from the environment.
# A real key was previously hard-coded here as the fallback value; any key
# that has ever been committed in source must be considered leaked and rotated.
ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY", "")
# NOTE(review): hard-coded absolute log path — assumes a WSL mount; confirm.
LOG_FILE = "/mnt/e/genesis-system/litellm/terminal_messages.log"

# Prepended to every forwarded user message (see chat_completions).
SYSTEM_PROMPT = """You are Claude, the Lead Architect of Genesis-OS, responding through the Terminal Bridge.
You have full context of the Genesis system and are helping Kinan with development tasks.
Be concise and helpful."""

class Message(BaseModel):
    """One chat turn in the OpenAI-style request body."""
    # role: e.g. "user", "assistant", "system"; only "user" is consumed here.
    role: str
    content: str

class ChatRequest(BaseModel):
    """OpenAI-compatible /v1/chat/completions request body."""
    # model: accepted for compatibility; the bridge always calls Claude.
    model: str
    messages: List[Message]
    max_tokens: Optional[int] = 2000
    # stream: accepted but not honored — responses are always non-streaming.
    stream: Optional[bool] = False

def log_message(direction: str, content: str) -> None:
    """Print a truncated log line and best-effort append it to LOG_FILE.

    Args:
        direction: Short flow tag (e.g. "📥 USER", "📤 CLAUDE").
        content: Message text; only the first 200 characters are recorded.
    """
    timestamp = datetime.now().strftime("%H:%M:%S")
    log_line = f"[{timestamp}] {direction}: {content[:200]}{'...' if len(content) > 200 else ''}"
    print(log_line)
    try:
        # Explicit UTF-8: the tags contain emoji, which can fail to encode
        # under a non-UTF-8 locale default.
        with open(LOG_FILE, "a", encoding="utf-8") as f:
            f.write(log_line + "\n")
    except OSError as exc:
        # File logging is best-effort — a missing mount/directory must not
        # turn every request into a 500.
        print(f"[log_message] failed to write {LOG_FILE}: {exc}")

@app.get("/health")
async def health():
    """Liveness probe: reports that the bridge is up and auto-forwarding."""
    payload = {"status": "ok", "mode": "auto-forward"}
    return payload

@app.get("/v1/models")
async def list_models():
    """OpenAI-compatible model listing exposing the single bridged model."""
    model_entry = {
        "id": "terminal-claude",
        "object": "model",
        "created": 1677610602,
        "owned_by": "genesis",
    }
    return {"data": [model_entry], "object": "list"}

@app.post("/v1/chat/completions")
async def chat_completions(request: ChatRequest):
    """Forward the latest user turn to the Anthropic Messages API and wrap
    the reply in an OpenAI-compatible chat.completion payload.

    Only the most recent "user" message is forwarded — earlier turns and
    other roles are dropped, and SYSTEM_PROMPT is prepended to that single
    turn. `request.stream` is ignored; a full response is always returned.
    On upstream failure an OpenAI-style error body is returned (note: with
    HTTP 200, matching the original behavior).
    """
    # Most recent user turn, or "" if the request contained none.
    user_message = next(
        (msg.content for msg in reversed(request.messages) if msg.role == "user"),
        "",
    )

    log_message("📥 USER", user_message)

    messages = [{"role": "user", "content": f"{SYSTEM_PROMPT}\n\nUser: {user_message}"}]

    async with httpx.AsyncClient(timeout=120) as client:
        response = await client.post(
            "https://api.anthropic.com/v1/messages",
            headers={
                "x-api-key": ANTHROPIC_API_KEY,
                "anthropic-version": "2023-06-01",
                "content-type": "application/json"
            },
            json={
                "model": "claude-sonnet-4-20250514",
                "max_tokens": request.max_tokens or 2000,
                "messages": messages
            }
        )

        if response.status_code != 200:
            error_msg = f"API error: {response.text}"
            log_message("❌ ERROR", error_msg)
            return {"error": {"message": error_msg, "type": "api_error", "code": response.status_code}}

        result = response.json()
        # Join all text blocks rather than indexing [0]: an empty `content`
        # array previously raised IndexError (the .get default only covered
        # the missing-key case, not an empty list).
        assistant_content = "".join(
            block.get("text", "") for block in result.get("content", [])
        ) or "No response"
        usage = result.get("usage", {})
        input_tokens = usage.get("input_tokens", 0)
        output_tokens = usage.get("output_tokens", 0)

        log_message("📤 CLAUDE", assistant_content)

        # Full OpenAI-compatible response format
        return {
            "id": f"chatcmpl-{int(time.time()*1000)}",
            "object": "chat.completion",
            "created": int(time.time()),
            "model": "terminal-claude",
            "system_fingerprint": "genesis-bridge-v1",
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": assistant_content
                },
                "logprobs": None,
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": input_tokens,
                "completion_tokens": output_tokens,
                "total_tokens": input_tokens + output_tokens
            }
        }

if __name__ == "__main__":
    # Dev entry point: run the bridge directly on the fixed port.
    bridge_port = 4001
    print(f"Terminal Bridge started on port {bridge_port}")
    uvicorn.run(app, host="0.0.0.0", port=bridge_port, log_level="warning")
