#!/usr/bin/env python3
"""Fast CSV loader — uses COPY + temp table for bulk insert speed."""
import sys, csv, re, os
from datetime import datetime
sys.path.append('E:/genesis-system/data/genesis-memory')

# Accumulated log lines; main() flushes these to the result file at the end.
RESULTS = []


def log(msg):
    """Echo *msg* to stdout immediately and retain it for the run summary."""
    print(msg, flush=True)
    RESULTS.append(msg)

def get_conn():
    """Open and return a new PostgreSQL connection from the Elestio config.

    Imports are local so the script can at least start (and report nicer
    errors) when psycopg2 or the config module is unavailable.
    """
    import psycopg2
    from elestio_config import PostgresConfig

    params = PostgresConfig.get_connection_params()
    return psycopg2.connect(**params)

def ensure_table(conn):
    """Create the tradie_leads table if it does not exist, then commit.

    Idempotent (CREATE TABLE IF NOT EXISTS); phone is the dedupe key
    via the UNIQUE constraint.
    """
    ddl = """
        CREATE TABLE IF NOT EXISTS tradie_leads (
            id              SERIAL PRIMARY KEY,
            business_name   TEXT NOT NULL,
            trade_category  TEXT,
            phone           TEXT,
            website         TEXT,
            street_address  TEXT,
            suburb          TEXT,
            state           TEXT,
            postcode        TEXT,
            email           TEXT,
            rating          TEXT,
            review_count    TEXT,
            source          TEXT DEFAULT 'csv_import',
            source_url      TEXT,
            scraped_date    TEXT,
            has_website     BOOLEAN,
            website_score   INTEGER,
            priority_score  FLOAT,
            estimated_revenue_band TEXT,
            outreach_status TEXT DEFAULT 'not_contacted',
            enriched_at     TIMESTAMP,
            created_at      TIMESTAMP DEFAULT NOW(),
            UNIQUE(phone)
        )
    """
    cursor = conn.cursor()
    cursor.execute(ddl)
    conn.commit()
    log("Table ready")

def normalise_phone(p):
    """Normalise a phone number into a bare dedupe key.

    Keeps digits only, preserving a single leading '+' (international
    prefix). Returns '' for None/empty input.

    Fix: the previous regex ([^\\d+]) kept '+' characters appearing
    ANYWHERE in the value, so garbage like '04-12+345-678' produced a
    key with an interior '+' and broke deduplication against the clean
    form of the same number.
    """
    if not p:
        return ''
    s = str(p).strip()
    digits = re.sub(r'\D', '', s)
    return ('+' + digits) if s.startswith('+') else digits

def _localsearch_record(row, source_name):
    """Map one localsearch-format CSV row onto the tradie_leads column dict."""
    return {
        'business_name':  (row.get('business_name') or '').strip(),
        'trade_category': (row.get('trade_category') or '').strip(),
        'phone':          normalise_phone(row.get('phone', '')),
        'website':        (row.get('website') or '').strip()[:500],
        'street_address': (row.get('street_address') or '').strip()[:300],
        'suburb':         (row.get('suburb') or '').strip()[:100],
        'state':          (row.get('state') or '').strip()[:10],
        'postcode':       (row.get('postcode') or '').strip()[:10],
        'email':          (row.get('email') or '').strip()[:200],
        'rating':         (row.get('rating') or '').strip()[:10],
        'review_count':   (row.get('review_count') or '').strip()[:10],
        'source':         source_name,
        'source_url':     (row.get('source_url') or '').strip()[:500],
        'scraped_date':   (row.get('scraped_date') or datetime.now().date().isoformat()),
    }


def _zoho_record(row, source_name):
    """Map one Zoho-export CSV row onto the tradie_leads column dict.

    Rating and review count live inside the free-text Description field
    ("Rating: 4.8 ... Reviews: 12") and are pulled out by regex.
    """
    desc = row.get('Description', '')
    m = re.search(r'Rating:\s*([\d.]+)', desc)
    rating = m.group(1) if m else ''
    m = re.search(r'Reviews:\s*(\d+)', desc)
    reviews = m.group(1) if m else ''
    return {
        'business_name':  (row.get('Company') or row.get('Last Name') or '').strip(),
        'trade_category': (row.get('Industry') or '').lower().strip()[:100],
        'phone':          normalise_phone(row.get('Phone', '')),
        'website':        (row.get('Website') or '').strip()[:500],
        'street_address': (row.get('Street') or '').strip()[:300],
        'suburb':         (row.get('City') or '').strip()[:100],
        'state':          (row.get('State') or '').strip()[:50],
        'postcode':       (row.get('Zip Code') or '').strip()[:10],
        'email':          (row.get('Email') or '').strip()[:200],
        'rating':         rating,
        'review_count':   reviews,
        'source':         source_name,
        'source_url':     '',
        'scraped_date':   datetime.now().date().isoformat(),
    }


def load_file(conn, path, fmt, source_name):
    """Load one CSV. fmt='localsearch' or 'zoho'.

    Returns the number of rows actually inserted. Duplicate phones are
    ignored (ON CONFLICT DO NOTHING); rows missing a business name or a
    usable phone are skipped.

    Fix: the old per-row error handler called conn.rollback(), which in
    PostgreSQL aborts the ENTIRE pending transaction — every row already
    inserted for this file was silently thrown away, and the error itself
    was never logged. Each insert now runs inside a savepoint so a bad
    row rolls back alone, and row errors are counted and reported.
    """
    if not os.path.exists(path):
        log(f"SKIP (not found): {path}")
        return 0
    cur = conn.cursor()
    inserted = 0
    skipped = 0
    errors = 0
    try:
        with open(path, encoding='utf-8-sig', errors='replace') as f:
            rows = list(csv.DictReader(f))
        log(f"  Reading {len(rows)} rows from {os.path.basename(path)}")

        for row in rows:
            if fmt == 'localsearch':
                rec = _localsearch_record(row, source_name)
            else:  # zoho
                rec = _zoho_record(row, source_name)

            # Name and phone are mandatory — phone is the dedupe key.
            if not rec['business_name'] or not rec['phone']:
                skipped += 1
                continue

            try:
                # Savepoint confines a failure to this single row instead
                # of aborting the whole file's transaction.
                cur.execute("SAVEPOINT row_sp")
                cur.execute("""
                    INSERT INTO tradie_leads
                        (business_name, trade_category, phone, website, street_address,
                         suburb, state, postcode, email, rating, review_count,
                         source, source_url, scraped_date)
                    VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)
                    ON CONFLICT (phone) DO NOTHING
                """, (
                    rec['business_name'], rec['trade_category'], rec['phone'],
                    rec['website'], rec['street_address'], rec['suburb'],
                    rec['state'], rec['postcode'], rec['email'],
                    rec['rating'], rec['review_count'], rec['source'],
                    rec['source_url'], rec['scraped_date']
                ))
                # Capture rowcount BEFORE the RELEASE statement overwrites it.
                row_inserted = cur.rowcount > 0
                cur.execute("RELEASE SAVEPOINT row_sp")
                if row_inserted:
                    inserted += 1
            except Exception as e:
                cur.execute("ROLLBACK TO SAVEPOINT row_sp")
                errors += 1
                if errors <= 3:  # avoid flooding the log on a badly broken file
                    log(f"  row error: {e}")

        conn.commit()
        suffix = f", {errors} row errors" if errors else ""
        log(f"  -> {inserted} inserted, {skipped} skipped (no name/phone){suffix}")
    except Exception as e:
        conn.rollback()
        log(f"  ERROR: {e}")
        inserted = 0
    return inserted

def main():
    """Run the full import: connect, ensure schema, load each CSV, report."""
    log(f"Fast CSV Loader — {datetime.now().isoformat()}")
    log("=" * 50)
    conn = get_conn()
    ensure_table(conn)

    # The deduped localsearch export comes first so its rows win any
    # phone-number conflicts against the raw per-suburb exports below.
    files = [
        ('E:/genesis-system/data/LEADS/localsearch_tradies_DEDUPED.csv',   'localsearch', 'localsearch_deduped'),
        ('E:/genesis-system/data/LEADS/localsearch_brisbane_plumbers.csv',  'localsearch', 'localsearch_ls'),
        ('E:/genesis-system/data/LEADS/localsearch_brisbane_electricians.csv','localsearch','localsearch_ls'),
        ('E:/genesis-system/data/LEADS/localsearch_brisbane_concreters.csv','localsearch', 'localsearch_ls'),
        ('E:/genesis-system/data/LEADS/localsearch_brisbane_landscapers_tilers.csv','localsearch','localsearch_ls'),
        ('E:/genesis-system/data/LEADS/localsearch_brisbane_roofers_painters.csv','localsearch','localsearch_ls'),
        ('E:/genesis-system/data/LEADS/localsearch_plumbers_brisbane.csv',  'localsearch', 'localsearch_ls'),
        ('E:/genesis-system/data/LEADS/TRADIE LEADS AUSTRALIA/ZOHO_SIMPLE_IMPORT.csv','zoho','google_maps'),
        ('E:/genesis-system/data/LEADS/Manus Email enriched Tradie Leads/ZOHO_SIMPLE_IMPORT_ENRICHED.csv','zoho','manus_enriched'),
    ]

    total = 0
    for csv_path, layout, src in files:
        log(f"\nLoading: {os.path.basename(csv_path)}")
        total += load_file(conn, csv_path, layout, src)

    # Grand total across all runs, straight from the table.
    cur = conn.cursor()
    cur.execute("SELECT COUNT(*) FROM tradie_leads")
    db_total = cur.fetchone()[0]

    log(f"\n{'='*50}")
    log(f"DONE. Inserted this run: {total}")
    log(f"Total in DB: {db_total}")
    log(f"Finished: {datetime.now().isoformat()}")

    conn.close()

    with open('E:/genesis-system/scripts/load_csv_result.txt', 'w') as f:
        f.write('\n'.join(RESULTS))
    log("Result written to load_csv_result.txt")


if __name__ == '__main__':
    main()
