import argparse
import logging
import time
from pathlib import Path
import sys

# Add the clone_pipeline directory to the Python path
sys.path.insert(0, str(Path(__file__).parent))

from extractor import extract_content
from reconstructor import reconstruct_site
from qa_checker import check_pagespeed_both, validate_html, generate_report
from deployer import deploy_to_netlify, save_locally, start_local_server

# --- Configuration ---
# Chat-widget snippet injected into every reconstructed page.
# NOTE: the onclick handler is a JavaScript string literal inside a
# double-quoted HTML attribute.  Apostrophes must therefore be escaped
# (\') and line breaks written as \n escapes -- a bare quote or a raw
# newline inside the literal is a JS syntax error and the button would
# throw instead of showing the alert.
WIDGET_SNIPPET = """
    <!-- Genesis AI Chatbot Widget -->
    <div id="genesis-chat" style="position:fixed;bottom:20px;right:20px;z-index:9999">
      <button onclick="alert('Hi! I\\'m the AI Assistant.\\n\\nI can help you:\\n• Book a service\\n• Get an instant quote\\n• General inquiry\\n\\nCall us now!')"
        style="background:#2563eb;color:white;border:none;border-radius:50%;width:64px;height:64px;font-size:28px;cursor:pointer;box-shadow:0 4px 16px rgba(37,99,235,0.5);transition:transform 0.2s"
        onmouseover="this.style.transform='scale(1.1)'"
        onmouseout="this.style.transform='scale(1)'">💬</button>
      <div style="position:absolute;bottom:72px;right:0;background:white;border-radius:12px;padding:8px 12px;font-size:12px;white-space:nowrap;box-shadow:0 2px 8px rgba(0,0,0,0.15);color:#1e3a5f;font-weight:600">AI Assistant - Chat Now!</div>
    </div>
"""
# Directory where the final QA report (clone_report.md) is written.
REPORT_OUTPUT_DIR = Path("/mnt/e/genesis-system/hive/progress/")

def _make_slug(name):
    """Derive a URL/directory-safe slug from a business name.

    Lowercases, turns spaces into hyphens, strips every character that is
    not alphanumeric or a hyphen, and trims leading/trailing hyphens.
    Falls back to "site" if the result is empty (e.g. an all-punctuation
    name), so the output path is never malformed.
    """
    slug = name.lower().replace(" ", "-")
    slug = "".join(c for c in slug if c.isalnum() or c == "-").strip("-")
    return slug or "site"


def _run_pagespeed(url, logger):
    """Run mobile + desktop PageSpeed checks against *url*, log the scores.

    Returns the results dict produced by check_pagespeed_both.
    """
    results = check_pagespeed_both(url)
    logger.info("PageSpeed Mobile Score: %s", results["mobile"]["score"])
    logger.info("PageSpeed Desktop Score: %s", results["desktop"]["score"])
    return results


def main():
    """Run the full clone pipeline: extract -> reconstruct -> QA -> deploy/report.

    Exits with status 1 if extraction or reconstruction fails.
    """
    parser = argparse.ArgumentParser(description="Genesis Website Clone Pipeline")
    parser.add_argument("--url", required=True, help="URL of the website to clone")
    parser.add_argument("--business-name", required=True, help="Name of the business")
    parser.add_argument("--services", default="", help="Comma-separated list of services")
    parser.add_argument("--phone", default="", help="Business phone number")
    parser.add_argument("--location", default="", help="Business location (city/region)")
    parser.add_argument("--local-only", action="store_true", help="Save output locally only, do not deploy to Netlify")
    parser.add_argument("--output-slug", help="Slug for output directory/site name. Defaults to business-name slug.")
    parser.add_argument("--log-level", default="INFO", help="Logging level (DEBUG, INFO, WARNING, ERROR)")

    args = parser.parse_args()

    # Fall back to INFO (the documented default) on an unrecognized level
    # name; passing None here would silently leave the root logger at WARNING.
    logging.basicConfig(level=getattr(logging, args.log_level.upper(), logging.INFO),
                        format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logger = logging.getLogger("clone_pipeline.run")

    start_time = time.time()

    # Determine output slug: explicit flag wins, otherwise derive from name.
    client_slug = args.output_slug or _make_slug(args.business_name)

    pipeline_output_dir = Path(f"/mnt/e/genesis-system/scripts/clone_pipeline/output/{client_slug}")
    pipeline_output_dir.mkdir(parents=True, exist_ok=True)

    logger.info("--- Starting Clone Pipeline for %s (%s) ---", args.business_name, args.url)
    logger.info("Output will be saved to: %s", pipeline_output_dir)

    # 1. Extraction
    logger.info("Step 1: Extracting content...")
    extracted_data = extract_content(args.url, pipeline_output_dir)
    if not extracted_data.get("markdown") and not extracted_data.get("html"):
        logger.error("Content extraction failed. Aborting pipeline.")
        sys.exit(1)
    logger.info("Extraction successful using method: %s", extracted_data["extraction_method"])

    # 2. Reconstruction
    logger.info("Step 2: Reconstructing site with Gemini...")
    reconstructed_html = reconstruct_site(
        content=extracted_data.get("markdown") or extracted_data.get("html"),
        business_name=args.business_name,
        services=args.services,
        phone=args.phone,
        location=args.location,
        url=args.url,
        widget_snippet=WIDGET_SNIPPET,
    )
    if not reconstructed_html:
        logger.error("Site reconstruction failed. Aborting pipeline.")
        sys.exit(1)
    logger.info("Reconstruction successful. HTML length: %s chars.", len(reconstructed_html))

    # Save reconstructed HTML locally for QA and potential local server
    reconstructed_html_path = save_locally(reconstructed_html, pipeline_output_dir, "index.html")
    logger.info("Reconstructed HTML saved to: %s", reconstructed_html_path)

    # 3. QA Checks
    logger.info("Step 3: Performing QA checks...")
    html_validation_results = validate_html(
        reconstructed_html,
        business_name=args.business_name,
        phone=args.phone,
    )
    logger.info("HTML Validation Score: %s/100", html_validation_results["score"])

    # PageSpeed needs a reachable URL: prefer the deployed Netlify site,
    # otherwise (failure or --local-only) spin up a temporary local server.
    deployed_url = None
    pagespeed_results = None
    if not args.local_only:
        logger.info("Attempting Netlify deployment...")
        netlify_url = deploy_to_netlify(
            reconstructed_html,
            site_name=client_slug,
        )
        if netlify_url:
            deployed_url = netlify_url
            logger.info("Site deployed to Netlify: %s", deployed_url)
            # Give Netlify a moment to spin up and cache
            time.sleep(10)
        else:
            logger.warning("Netlify deployment failed or skipped (no token). Falling back to local server for PageSpeed.")
            local_server_url = start_local_server(pipeline_output_dir)
            deployed_url = local_server_url + "/index.html"  # Assuming index.html is served
            logger.info("Local server started for PageSpeed: %s", deployed_url)
            time.sleep(5)  # Give local server a moment

        if deployed_url:
            pagespeed_results = _run_pagespeed(deployed_url, logger)
        else:
            logger.warning("Could not get a URL for PageSpeed checks.")
    else:
        # Local-only mode: start local server for PageSpeed and manual inspection
        logger.info("Local-only mode: Starting local server for PageSpeed checks and manual inspection.")
        local_server_url = start_local_server(pipeline_output_dir)
        deployed_url = local_server_url + "/index.html"  # Assuming index.html is served
        logger.info("Local server started at: %s", deployed_url)
        time.sleep(5)  # Give local server a moment
        pagespeed_results = _run_pagespeed(deployed_url, logger)

    # 4. Generate Report
    logger.info("Step 4: Generating QA report...")
    elapsed_time = time.time() - start_time
    report_content = generate_report(
        original_url=args.url,
        new_url=deployed_url,
        business_name=args.business_name,
        client_slug=client_slug,
        pagespeed_scores=pagespeed_results,
        html_validation=html_validation_results,
        extraction_method=extracted_data["extraction_method"],
        elapsed_seconds=elapsed_time,
        output_dir=REPORT_OUTPUT_DIR,  # Save to the progress directory
    )
    logger.info("QA Report generated and saved to %s", REPORT_OUTPUT_DIR / 'clone_report.md')
    logger.info("--- Pipeline Finished in %.2f seconds ---", elapsed_time)

    print(f"\nPipeline complete for {args.business_name}!")
    if deployed_url:
        print(f"Live/Local URL: {deployed_url}")
    print(f"Report available at: {REPORT_OUTPUT_DIR / 'clone_report.md'}")
    print(f"Output directory: {pipeline_output_dir}")


# Script entry point: run the pipeline when executed directly
# (e.g. python run.py --url ... --business-name ...).
if __name__ == "__main__":
    main()
