
import os
import glob
import json
import logging
from pathlib import Path
import google.generativeai as genai

# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# --- Configuration ---
# Directory scanned (recursively) for Deep Think framework files (.md / .json).
DEEP_THINK_DIR = "deep_think_results"
# Directory scanned (recursively) for axiom files (.md / .jsonl).
AXIOMS_DIR = "KNOWLEDGE_GRAPH/axioms"
CACHE_MODEL = "gemini-1.5-flash-001" # Use a model that supports context caching
# Human-readable label attached to the cache when it is created.
CACHE_DISPLAY_NAME = "deep_think_frameworks_and_axioms"
CACHE_TTL_SECONDS = 86400  # 24 hours
# File where the created cache's resource name is persisted for later reuse.
CACHE_NAME_FILE = ".gemini/context_cache_name.txt"

def read_all_content() -> str:
    """Read all Deep Think frameworks and axioms into a single string.

    Recursively collects ``.md``/``.json`` files under ``DEEP_THINK_DIR`` and
    ``.md``/``.jsonl`` files under ``AXIOMS_DIR``. JSON Lines files are parsed
    record-by-record and pretty-printed; lines that are not valid JSON are
    kept verbatim. Unreadable files are logged and skipped.

    Returns:
        All collected content joined with "\\n\\n---\\n\\n" separators,
        or "" when no files are found.
    """
    all_content = []

    # Read Deep Think frameworks (markdown and JSON, recursively).
    logger.info(f"Reading Deep Think frameworks from {DEEP_THINK_DIR}...")
    framework_files = (glob.glob(f"{DEEP_THINK_DIR}/**/*.md", recursive=True) +
                       glob.glob(f"{DEEP_THINK_DIR}/**/*.json", recursive=True))

    for file_path in framework_files:
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                all_content.append(f.read())
        except Exception as e:
            logger.warning(f"Could not read {file_path}: {e}")

    # Read axioms (markdown and JSON Lines, recursively).
    logger.info(f"Reading axioms from {AXIOMS_DIR}...")
    axiom_files = (glob.glob(f"{AXIOMS_DIR}/**/*.md", recursive=True) +
                   glob.glob(f"{AXIOMS_DIR}/**/*.jsonl", recursive=True))

    for file_path in axiom_files:
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                if file_path.endswith(".jsonl"):
                    for line in f:
                        # Skip blank lines: they are not valid JSON, and
                        # previously fell through the JSONDecodeError branch,
                        # injecting empty entries into the joined output.
                        if not line.strip():
                            continue
                        try:
                            data = json.loads(line)
                            all_content.append(json.dumps(data, indent=2))
                        except json.JSONDecodeError:
                            all_content.append(line) # Append raw line if not valid JSON
                else:
                    all_content.append(f.read())
        except Exception as e:
            logger.warning(f"Could not read {file_path}: {e}")

    logger.info(f"Read {len(framework_files)} framework files and {len(axiom_files)} axiom files.")
    return "\n\n---\n\n".join(all_content)

def create_context_cache(content: str):
    """Create a Gemini Context Cache from *content* (simulated in PoC mode).

    Logs the cache parameters, then — instead of calling the real API —
    writes a dummy cache resource name to ``CACHE_NAME_FILE`` so downstream
    steps can look it up.
    """
    logger.info("Creating Gemini Context Cache...")
    logger.info(f"Model: {CACHE_MODEL}")
    logger.info(f"Display Name: {CACHE_DISPLAY_NAME}")
    logger.info(f"Content length: {len(content)} characters")

    # NOTE(review): the real implementation would configure `genai` with the
    # GEMINI_API_KEY environment variable and call
    # `genai.caching.CachedContent.create(model=CACHE_MODEL,
    #  display_name=CACHE_DISPLAY_NAME, contents=[content],
    #  ttl=datetime.timedelta(seconds=CACHE_TTL_SECONDS))`, then persist
    # `cached_content.name` to CACHE_NAME_FILE (requires `import datetime`).

    # PoC mode: skip the API call and record a fixed dummy cache name.
    cache_name = "cachedContents/dummy-cache-12345"
    logger.info(f"PoC Mode: Simulating cache creation. Dummy name: {cache_name}")
    target = Path(CACHE_NAME_FILE)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(cache_name)
    logger.info(f"PoC Mode: Dummy cache name saved to {CACHE_NAME_FILE}")


if __name__ == "__main__":
    # Gather everything once; only build a cache when there is something to cache.
    corpus = read_all_content()
    if not corpus:
        logger.warning("No content found to create a context cache.")
    else:
        create_context_cache(corpus)
