import os
import hashlib
import time
import logging
import psycopg2
import psycopg2.extras
from typing import List, Dict, Optional
from redis import Redis
from qdrant_client import QdrantClient

# Configure the root logger: timestamped, level-tagged messages at INFO and above.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

class ChangeTracker:
    """
    Tracks content changes (additions, modifications, deletions) of files
    under a directory tree.

    The last-seen SHA-256 hash of every file is persisted in the PostgreSQL
    table ``document_changes``. Whenever a change is detected, the row is
    updated and the file path is published to the Redis channel
    ``reindexing_channel`` so downstream consumers can re-index the document.
    """

    # Hash files in fixed-size chunks so large files never have to be loaded
    # into memory all at once.
    _HASH_CHUNK_SIZE = 65536

    def __init__(self, directory: str, db_params: Dict, redis_params: Dict, qdrant_params: Dict):
        """
        Initializes the ChangeTracker.

        Args:
            directory (str): The directory to monitor for changes.
            db_params (Dict): psycopg2 connection parameters.
            redis_params (Dict): Redis connection parameters.
            qdrant_params (Dict): Qdrant connection parameters.

        Raises:
            psycopg2.Error: If the database connection or table creation fails.
            Exception: If the Qdrant client cannot be constructed.
        """
        self.directory = directory
        self.db_params = db_params
        self.redis_params = redis_params
        self.qdrant_params = qdrant_params
        self.redis_client = Redis(**self.redis_params)

        try:
            self.conn = psycopg2.connect(**self.db_params)
            # DictCursor lets result rows be addressed by column name.
            self.cursor = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
            self._create_table()
        except psycopg2.Error as e:
            logging.error(f"Database connection error: {e}")
            raise

        try:
            # NOTE(review): the Qdrant client is created but never used by any
            # method of this class; presumably subscribers of the Redis channel
            # perform the actual vector-store updates — confirm before removing.
            self.qdrant_client = QdrantClient(**self.qdrant_params)
        except Exception as e:
            logging.error(f"Qdrant connection error: {e}")
            raise

    def _create_table(self):
        """
        Creates the change tracking table in the database if it doesn't exist.

        ``new_hash`` always holds the most recently observed hash of the file;
        ``old_hash`` holds the hash it replaced (NULL for freshly added files).

        Raises:
            psycopg2.Error: If the DDL statement fails (transaction is rolled back).
        """
        try:
            self.cursor.execute("""
                CREATE TABLE IF NOT EXISTS document_changes (
                    file_path VARCHAR(255) PRIMARY KEY,
                    old_hash VARCHAR(255),
                    new_hash VARCHAR(255),
                    change_timestamp TIMESTAMP WITHOUT TIME ZONE
                )
            """)
            self.conn.commit()
        except psycopg2.Error as e:
            logging.error(f"Error creating table: {e}")
            self.conn.rollback()
            raise

    def _calculate_hash(self, file_path: str) -> Optional[str]:
        """
        Calculates the SHA-256 hash of a file, reading it in chunks.

        Args:
            file_path (str): The path to the file.

        Returns:
            Optional[str]: The hex SHA-256 digest of the file, or None if the
            file does not exist or an error occurs.
        """
        try:
            hasher = hashlib.sha256()
            with open(file_path, "rb") as f:
                # Chunked read keeps memory usage bounded for large files.
                for chunk in iter(lambda: f.read(self._HASH_CHUNK_SIZE), b""):
                    hasher.update(chunk)
            return hasher.hexdigest()
        except FileNotFoundError:
            logging.warning(f"File not found: {file_path}")
            return None
        except Exception as e:
            logging.error(f"Error calculating hash for {file_path}: {e}")
            return None

    def track_changes(self):
        """
        Tracks changes to files in the specified directory.

        Performs one full pass: every file on disk is checked for addition or
        modification, then every tracked path that is no longer on disk is
        recorded as a deletion.
        """
        logging.info("Tracking changes...")
        files = self._get_all_files(self.directory)
        for file_path in files:
            self._process_file(file_path)

        # Deletion detection: previously the only deletion path was a file
        # vanishing between directory listing and hashing, so real deletions
        # were never noticed. Compare the tracked set against what is on disk.
        on_disk = set(files)
        for tracked_path, last_hash in self._get_tracked_files():
            if tracked_path not in on_disk:
                self._log_deletion(tracked_path, last_hash)

        logging.info("Change tracking complete.")

    def _get_all_files(self, directory: str) -> List[str]:
        """
        Gets a list of all files in the specified directory and its subdirectories.

        Args:
            directory (str): The directory to search.

        Returns:
            List[str]: A list of file paths.
        """
        file_paths = []
        for root, _, files in os.walk(directory):
            for file in files:
                file_paths.append(os.path.join(root, file))
        return file_paths

    def _get_tracked_files(self) -> List[tuple]:
        """
        Fetches every tracked file and its last recorded hash from the database.

        Returns:
            List[tuple]: (file_path, new_hash) pairs; empty list on query error.
        """
        try:
            self.cursor.execute("SELECT file_path, new_hash FROM document_changes")
            return [(row['file_path'], row['new_hash']) for row in self.cursor.fetchall()]
        except psycopg2.Error as e:
            logging.error(f"Error fetching tracked files: {e}")
            return []

    def _process_file(self, file_path: str):
        """
        Processes a single file, checking for changes and updating the database.

        Args:
            file_path (str): The path to the file.
        """
        try:
            old_hash = self._get_old_hash(file_path)
            new_hash = self._calculate_hash(file_path)

            if new_hash is None:
                # File vanished between directory listing and hashing.
                if old_hash:
                    self._log_deletion(file_path, old_hash)
                return

            if old_hash is None:
                # File is new
                self._log_addition(file_path, new_hash)
            elif old_hash != new_hash:
                # File has been modified
                self._log_modification(file_path, old_hash, new_hash)
            # else: unchanged — nothing to do.

        except Exception as e:
            logging.error(f"Error processing file {file_path}: {e}")

    def _get_old_hash(self, file_path: str) -> Optional[str]:
        """
        Gets the most recently recorded hash of a file from the database.

        Args:
            file_path (str): The path to the file.

        Returns:
            Optional[str]: The last recorded hash, or None if the file is not
            tracked yet (or the query fails).
        """
        try:
            # Bug fix: the last observed content hash is stored in ``new_hash``.
            # Reading ``old_hash`` (NULL for freshly added files) made every
            # unchanged file look "new" on each run, causing a duplicate-key
            # INSERT failure and rollback per file.
            self.cursor.execute("SELECT new_hash FROM document_changes WHERE file_path = %s", (file_path,))
            result = self.cursor.fetchone()
            return result['new_hash'] if result else None
        except psycopg2.Error as e:
            logging.error(f"Error getting old hash for {file_path}: {e}")
            return None

    def _log_addition(self, file_path: str, new_hash: str):
        """
        Logs the addition of a new file to the database and triggers re-indexing.

        Args:
            file_path (str): The path to the file.
            new_hash (str): The new hash of the file.
        """
        try:
            self.cursor.execute("""
                INSERT INTO document_changes (file_path, old_hash, new_hash, change_timestamp)
                VALUES (%s, %s, %s, %s)
            """, (file_path, None, new_hash, time.strftime('%Y-%m-%d %H:%M:%S')))
            self.conn.commit()
            logging.info(f"File added: {file_path}")
            self._trigger_reindexing(file_path)
        except psycopg2.Error as e:
            logging.error(f"Error logging addition of {file_path}: {e}")
            self.conn.rollback()

    def _log_modification(self, file_path: str, old_hash: str, new_hash: str):
        """
        Logs the modification of a file to the database and triggers re-indexing.

        Args:
            file_path (str): The path to the file.
            old_hash (str): The previously recorded hash of the file.
            new_hash (str): The new hash of the file.
        """
        try:
            self.cursor.execute("""
                UPDATE document_changes
                SET old_hash = %s, new_hash = %s, change_timestamp = %s
                WHERE file_path = %s
            """, (old_hash, new_hash, time.strftime('%Y-%m-%d %H:%M:%S'), file_path))
            self.conn.commit()
            logging.info(f"File modified: {file_path}")
            self._trigger_reindexing(file_path)
        except psycopg2.Error as e:
            logging.error(f"Error logging modification of {file_path}: {e}")
            self.conn.rollback()

    def _log_deletion(self, file_path: str, old_hash: str):
        """
        Removes a deleted file's row from the database and triggers re-indexing.

        Args:
            file_path (str): The path to the file.
            old_hash (str): The last recorded hash of the file (logged only).
        """
        try:
            self.cursor.execute("DELETE FROM document_changes WHERE file_path = %s", (file_path,))
            self.conn.commit()
            logging.info(f"File deleted: {file_path}")
            self._trigger_reindexing(file_path)
        except psycopg2.Error as e:
            logging.error(f"Error logging deletion of {file_path}: {e}")
            self.conn.rollback()

    def _trigger_reindexing(self, file_path: str):
        """
        Triggers re-indexing of a file by publishing a message to Redis.

        Args:
            file_path (str): The path to the file.
        """
        try:
            self.redis_client.publish('reindexing_channel', file_path)
            logging.info(f"Re-indexing triggered for: {file_path}")
        except Exception as e:
            logging.error(f"Error triggering re-indexing for {file_path}: {e}")

    def close(self):
        """
        Closes the database cursor and connection.
        """
        # getattr guard: __init__ may have raised before self.conn was assigned.
        if getattr(self, 'conn', None):
            self.cursor.close()
            self.conn.close()
            logging.info("Database connection closed.")



if __name__ == '__main__':

    def main():
        """Run a single change-tracking pass over a sample directory."""
        # Example configuration — replace with your actual connection settings.
        # NOTE(review): credentials/hosts are hardcoded here; prefer environment
        # variables or a config file in real deployments.
        db_params = {
            'dbname': 'your_db_name',
            'user': 'your_user',
            'password': 'your_password',
            'host': 'postgresql-genesis-u50607.vm.elestio.app',
            'port': 25432
        }
        redis_params = {
            'host': 'redis-genesis-u50607.vm.elestio.app',
            'port': 26379,
            'db': 0
        }
        qdrant_params = {
            'host': 'qdrant-b3knu-u50607.vm.elestio.app',
            'port': 6333,
            'https': False
        }
        directory_to_track = "/mnt/e/genesis-system/data" # Replace with a directory containing test files

        # Ensure the sample directory and a sample file exist so the pass has
        # something to process.
        os.makedirs(directory_to_track, exist_ok=True)
        dummy_file_path = os.path.join(directory_to_track, "dummy.txt")
        if not os.path.exists(dummy_file_path):
            with open(dummy_file_path, "w") as f:
                f.write("This is a dummy file.")

        tracker = ChangeTracker(directory_to_track, db_params, redis_params, qdrant_params)
        try:
            tracker.track_changes()
        finally:
            # Always release the DB connection, even if tracking failed.
            tracker.close()

    main()
