#!/usr/bin/env python3
"""
Load existing tradie CSVs into PostgreSQL tradie_leads table.
Handles multiple CSV formats from different scrapers.
Deduplicates by phone number.

Usage:
    python load_csv_to_db.py

Output written to: E:\genesis-system\scripts\load_csv_result.txt
"""

import sys
import csv
import os
import re
from pathlib import Path
from datetime import datetime

# Make the shared genesis-memory package importable (provides elestio_config).
sys.path.append('E:/genesis-system/data/genesis-memory')

# Accumulated log lines; flushed to load_csv_result.txt at the end of main().
RESULTS = []

def log(msg):
    """Echo *msg* to stdout and buffer it for the results file."""
    RESULTS.append(msg)
    print(msg)

def get_db_conn():
    """Open and return a psycopg2 connection built from the Elestio config."""
    import psycopg2
    from elestio_config import PostgresConfig

    params = PostgresConfig.get_connection_params()
    return psycopg2.connect(**params)

def ensure_table(conn):
    """Create the tradie_leads table if it does not exist yet (idempotent)."""
    ddl = """
        CREATE TABLE IF NOT EXISTS tradie_leads (
            id              SERIAL PRIMARY KEY,
            business_name   TEXT NOT NULL,
            trade_category  TEXT,
            phone           TEXT,
            website         TEXT,
            street_address  TEXT,
            suburb          TEXT,
            state           TEXT,
            postcode        TEXT,
            email           TEXT,
            rating          TEXT,
            review_count    TEXT,
            source          TEXT DEFAULT 'csv_import',
            source_url      TEXT,
            scraped_date    TEXT,
            has_website     BOOLEAN,
            website_score   INTEGER,
            priority_score  FLOAT,
            estimated_revenue_band TEXT,
            outreach_status TEXT DEFAULT 'not_contacted',
            enriched_at     TIMESTAMP,
            created_at      TIMESTAMP DEFAULT NOW(),
            UNIQUE(phone)
        )
    """
    cursor = conn.cursor()
    cursor.execute(ddl)
    conn.commit()

def load_existing_phones(conn):
    """Return the set of non-empty phone numbers already stored in the DB."""
    cursor = conn.cursor()
    cursor.execute("SELECT phone FROM tradie_leads WHERE phone IS NOT NULL AND phone != ''")
    phones = set()
    for (phone,) in cursor.fetchall():
        phones.add(phone)
    return phones

def normalise_phone(phone: str) -> str:
    """Normalise a phone number to bare digits, keeping a leading '+' only.

    The previous pattern ``[^\\d+]`` preserved '+' characters anywhere in
    the string (e.g. ``"07 31+23"`` -> ``"0731+23"``); a plus sign is only
    meaningful as an international-dialling prefix at the start.

    Args:
        phone: raw phone value from a CSV cell (may be None/empty).

    Returns:
        Digits only, prefixed with '+' when the input started with one;
        '' for falsy input.
    """
    if not phone:
        return ''
    raw = str(phone).strip()
    digits = re.sub(r'\D', '', raw)
    # Re-attach the international prefix only when it led the original value.
    return '+' + digits if raw.startswith('+') else digits

def insert_batch(conn, records: list) -> int:
    """Insert lead dicts into tradie_leads, skipping duplicate phones.

    Each row runs inside its own savepoint: previously, a failing row
    triggered ``conn.rollback()``, which discarded every earlier
    *uncommitted* insert in the batch while ``inserted`` kept counting
    them — the returned count could exceed the rows actually stored.
    Failures are now logged instead of silently swallowed.

    Args:
        conn:    open psycopg2 connection.
        records: list of dicts keyed by tradie_leads column names.

    Returns:
        Number of rows actually inserted (conflicts and failures excluded).
    """
    if not records:
        return 0

    def _txt(value, limit):
        # None-safe truncation: a None value would make value[:limit] raise.
        return ('' if value is None else str(value))[:limit]

    cur = conn.cursor()
    inserted = 0
    for r in records:
        try:
            cur.execute("SAVEPOINT lead_row")
            cur.execute("""
                INSERT INTO tradie_leads
                    (business_name, trade_category, phone, website, street_address,
                     suburb, state, postcode, email, rating, review_count,
                     source, source_url, scraped_date)
                VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
                ON CONFLICT (phone) DO NOTHING
            """, (
                _txt(r.get('business_name', ''), 200),
                _txt(r.get('trade_category', ''), 100),
                _txt(r.get('phone', ''), 30),
                _txt(r.get('website', ''), 500),
                _txt(r.get('street_address', ''), 300),
                _txt(r.get('suburb', ''), 100),
                _txt(r.get('state', ''), 10),
                _txt(r.get('postcode', ''), 10),
                _txt(r.get('email', ''), 200),
                _txt(r.get('rating', ''), 10),
                _txt(r.get('review_count', ''), 10),
                _txt(r.get('source', 'csv_import'), 50),
                _txt(r.get('source_url', ''), 500),
                r.get('scraped_date', datetime.now().date().isoformat()),
            ))
            if cur.rowcount > 0:
                inserted += 1
            cur.execute("RELEASE SAVEPOINT lead_row")
        except Exception as e:
            # Undo only this row; earlier rows in the batch stay pending.
            cur.execute("ROLLBACK TO SAVEPOINT lead_row")
            log(f"  WARN insert failed for {r.get('business_name', '')!r}: {e}")
            continue
    conn.commit()
    return inserted

def load_localsearch_csv(path: str, conn, existing_phones: set, source_name: str) -> int:
    """Load standard localsearch format CSV."""
    BATCH_SIZE = 100
    total = 0
    pending = []
    try:
        with open(path, encoding='utf-8', errors='replace') as fh:
            for row in csv.DictReader(fh):
                # Skip nameless rows outright; they cannot be useful leads.
                if not row.get('business_name'):
                    continue
                phone = normalise_phone(row.get('phone', ''))
                if phone:
                    # Deduplicate against DB contents and earlier rows this run.
                    if phone in existing_phones:
                        continue
                    existing_phones.add(phone)
                pending.append({
                    'business_name':  row.get('business_name', ''),
                    'trade_category': row.get('trade_category', ''),
                    'phone':          phone,
                    'website':        row.get('website', ''),
                    'street_address': row.get('street_address', ''),
                    'suburb':         row.get('suburb', ''),
                    'state':          row.get('state', ''),
                    'postcode':       row.get('postcode', ''),
                    'email':          row.get('email', ''),
                    'rating':         row.get('rating', ''),
                    'review_count':   row.get('review_count', ''),
                    'source':         source_name,
                    'source_url':     row.get('source_url', ''),
                    'scraped_date':   row.get('scraped_date', ''),
                })
                if len(pending) >= BATCH_SIZE:
                    total += insert_batch(conn, pending)
                    pending = []
            if pending:
                total += insert_batch(conn, pending)
    except Exception as e:
        log(f"  ERROR reading {path}: {e}")
    return total

def load_zoho_csv(path: str, conn, existing_phones: set, source_name: str) -> int:
    """Load ZOHO_SIMPLE_IMPORT format CSV."""

    def _extract(pattern, text):
        # First capture group of *pattern* in *text*, or '' when absent.
        found = re.search(pattern, text)
        return found.group(1) if found else ''

    BATCH_SIZE = 100
    total = 0
    pending = []
    try:
        # utf-8-sig strips the BOM that Zoho exports carry.
        with open(path, encoding='utf-8-sig', errors='replace') as fh:
            for row in csv.DictReader(fh):
                company = row.get('Company', row.get('Last Name', ''))
                if not company:
                    continue
                phone = normalise_phone(row.get('Phone', ''))
                if phone:
                    if phone in existing_phones:
                        continue
                    existing_phones.add(phone)

                # Rating/review signals are embedded in the free-text Description.
                desc = row.get('Description', '')
                pending.append({
                    'business_name':  company,
                    'trade_category': row.get('Industry', '').lower(),
                    'phone':          phone,
                    'website':        row.get('Website', ''),
                    'street_address': row.get('Street', ''),
                    'suburb':         row.get('City', ''),
                    'state':          row.get('State', ''),
                    'postcode':       row.get('Zip Code', ''),
                    'email':          row.get('Email', ''),
                    'rating':         _extract(r'Rating:\s*([\d.]+)', desc),
                    'review_count':   _extract(r'Reviews:\s*(\d+)', desc),
                    'source':         source_name,
                    'source_url':     '',
                    'scraped_date':   datetime.now().date().isoformat(),
                })
                if len(pending) >= BATCH_SIZE:
                    total += insert_batch(conn, pending)
                    pending = []
            if pending:
                total += insert_batch(conn, pending)
    except Exception as e:
        log(f"  ERROR reading {path}: {e}")
    return total

def main():
    """Run all CSV imports, log a summary, and write it to the results file."""
    log(f"CSV -> PostgreSQL Loader — {datetime.now().isoformat()}")
    log("=" * 60)

    conn = get_db_conn()
    ensure_table(conn)
    existing_phones = load_existing_phones(conn)
    log(f"Pre-loaded {len(existing_phones)} existing phones from DB")

    # Each job: (loader function, csv path, source tag for the DB, log label).
    jobs = [
        (load_localsearch_csv,
         'E:/genesis-system/data/LEADS/localsearch_tradies_DEDUPED.csv',
         'localsearch_deduped', 'localsearch_DEDUPED'),
        (load_zoho_csv,
         'E:/genesis-system/data/LEADS/TRADIE LEADS AUSTRALIA/ZOHO_SIMPLE_IMPORT.csv',
         'google_maps_zoho', 'TRADIE_LEADS_AU/ZOHO'),
        (load_zoho_csv,
         'E:/genesis-system/data/LEADS/Manus Email enriched Tradie Leads/ZOHO_SIMPLE_IMPORT_ENRICHED.csv',
         'manus_enriched', 'Manus enriched'),
    ]

    # Individual localsearch exports: label by filename, tag by source prefix.
    for fpath in (
        'E:/genesis-system/data/LEADS/localsearch_brisbane_plumbers.csv',
        'E:/genesis-system/data/LEADS/localsearch_brisbane_electricians.csv',
        'E:/genesis-system/data/LEADS/localsearch_brisbane_concreters.csv',
        'E:/genesis-system/data/LEADS/localsearch_brisbane_landscapers_tilers.csv',
        'E:/genesis-system/data/LEADS/localsearch_brisbane_roofers_painters.csv',
        'E:/genesis-system/data/LEADS/localsearch_plumbers_brisbane.csv',
    ):
        fname = os.path.basename(fpath)
        jobs.append((load_localsearch_csv, fpath, f'localsearch_{fname}', fname))

    total_loaded = 0
    for loader, path, source_tag, label in jobs:
        if not os.path.exists(path):
            continue
        n = loader(path, conn, existing_phones, source_tag)
        log(f"[{label}] Loaded {n} new leads")
        total_loaded += n

    # Final count straight from the table, not from this run's tally.
    cur = conn.cursor()
    cur.execute("SELECT COUNT(*) FROM tradie_leads")
    db_total = cur.fetchone()[0]

    log(f"\n{'='*60}")
    log(f"LOAD COMPLETE")
    log(f"New leads loaded this run: {total_loaded}")
    log(f"Total in tradie_leads DB:  {db_total}")
    log(f"Finished: {datetime.now().isoformat()}")

    conn.close()

    # Write results to file
    with open('E:/genesis-system/scripts/load_csv_result.txt', 'w') as f:
        f.write('\n'.join(RESULTS))

    return db_total

# Script entry point: run the full import when executed directly.
if __name__ == "__main__":
    main()
