```python
import psycopg2
from psycopg2 import sql
import datetime
import json
import logging

# Configure logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')

class EpisodicMemoryStore:
    """
    A PostgreSQL-based episodic memory store for AIVA.

    This class provides methods for storing, retrieving, and managing
    AIVA's episodic memory, including conversation history, decision records,
    learning events, and error logs.
    """

    def __init__(self, dbname, user, password, host='localhost', port=5432):
        """
        Initializes the EpisodicMemoryStore and opens the database connection.

        Args:
            dbname (str): The name of the PostgreSQL database.
            user (str): The PostgreSQL user.
            password (str): The password for the PostgreSQL user.
            host (str, optional): The hostname of the PostgreSQL server. Defaults to 'localhost'.
            port (int, optional): The port number of the PostgreSQL server. Defaults to 5432.

        Raises:
            psycopg2.Error: If the connection or schema setup fails.
        """
        self.dbname = dbname
        self.user = user
        self.password = password
        self.host = host
        self.port = port
        self.conn = None    # psycopg2 connection; None whenever disconnected.
        self.cursor = None  # psycopg2 cursor; None whenever disconnected.
        self.connect()      # Establish connection, cursor, and schema.

    def connect(self):
        """
        Establishes a connection to the PostgreSQL database and ensures the
        required tables exist.

        Raises:
            psycopg2.Error: If connecting or creating tables fails.
        """
        try:
            self.conn = psycopg2.connect(
                dbname=self.dbname,
                user=self.user,
                password=self.password,
                host=self.host,
                port=self.port
            )
            self.cursor = self.conn.cursor()
            self.create_tables()
            logging.info("Connected to PostgreSQL database.")
        except psycopg2.Error as e:
            logging.error(f"Error connecting to PostgreSQL: {e}")
            raise

    def disconnect(self):
        """
        Closes the cursor and the connection. Safe to call more than once.
        """
        if self.cursor:
            self.cursor.close()
            self.cursor = None  # Drop the closed cursor so it cannot be reused.
        if self.conn:
            self.conn.close()
            self.conn = None  # Drop the closed connection; avoids double-close.
            logging.info("Disconnected from PostgreSQL database.")

    def create_tables(self):
        """
        Creates the necessary tables in the PostgreSQL database if they don't exist.

        NOTE: gen_random_uuid() is built in from PostgreSQL 13 onward; on
        older servers the pgcrypto extension must be enabled first.

        Raises:
            psycopg2.Error: If table creation fails (the transaction is rolled back).
        """
        try:
            create_episodes_table_query = """
            CREATE TABLE IF NOT EXISTS episodes (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                timestamp TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
                type VARCHAR(255) NOT NULL,
                content TEXT,
                metadata JSONB
            );
            """

            create_decisions_table_query = """
            CREATE TABLE IF NOT EXISTS decisions (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                episode_id UUID REFERENCES episodes(id),
                input TEXT,
                output TEXT,
                confidence FLOAT,
                outcome VARCHAR(255)
            );
            """

            create_learnings_table_query = """
            CREATE TABLE IF NOT EXISTS learnings (
                id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
                source_episode UUID REFERENCES episodes(id),
                axiom_generated TEXT,
                confidence FLOAT
            );
            """

            self.cursor.execute(create_episodes_table_query)
            self.cursor.execute(create_decisions_table_query)
            self.cursor.execute(create_learnings_table_query)
            self.conn.commit()
            logging.info("Tables created (if they didn't exist).")
        except psycopg2.Error as e:
            logging.error(f"Error creating tables: {e}")
            self.conn.rollback()
            raise

    def store_episode(self, type, content, metadata=None):
        """
        Stores a new episode in the episodes table.

        Args:
            type (str): The type of episode (e.g., "conversation", "error", "decision").
            content (str): The content of the episode.
            metadata (dict, optional): Additional metadata associated with the episode. Defaults to None.

        Returns:
            str: The ID of the newly created episode.

        Raises:
            psycopg2.Error: If the insert fails (the transaction is rolled back).
        """
        try:
            query = """
            INSERT INTO episodes (type, content, metadata)
            VALUES (%s, %s, %s)
            RETURNING id;
            """
            # Persist SQL NULL when no metadata is supplied; json.dumps(None)
            # would otherwise store a JSON 'null' value rather than a missing one.
            metadata_json = json.dumps(metadata) if metadata is not None else None
            self.cursor.execute(query, (type, content, metadata_json))
            episode_id = self.cursor.fetchone()[0]
            self.conn.commit()
            logging.info(f"Stored episode of type '{type}' with ID: {episode_id}")
            return episode_id
        except psycopg2.Error as e:
            logging.error(f"Error storing episode: {e}")
            self.conn.rollback()
            raise

    def store_decision(self, episode_id, input, output, confidence, outcome=None):
        """
        Stores a decision record in the decisions table.

        Args:
            episode_id (str): The ID of the episode associated with the decision.
            input (str): The input to the decision-making process.
            output (str): The output of the decision-making process.
            confidence (float): The confidence level associated with the decision.
            outcome (str, optional): The outcome of the decision (e.g., "success", "failure"). Defaults to None.

        Returns:
            str: The ID of the newly created decision record.

        Raises:
            psycopg2.Error: If the insert fails (the transaction is rolled back).
        """
        try:
            query = """
            INSERT INTO decisions (episode_id, input, output, confidence, outcome)
            VALUES (%s, %s, %s, %s, %s)
            RETURNING id;
            """
            self.cursor.execute(query, (episode_id, input, output, confidence, outcome))
            decision_id = self.cursor.fetchone()[0]
            self.conn.commit()
            logging.info(f"Stored decision with ID: {decision_id}, associated with episode: {episode_id}")
            return decision_id
        except psycopg2.Error as e:
            logging.error(f"Error storing decision: {e}")
            self.conn.rollback()
            raise

    def store_learning(self, source_episode, axiom_generated, confidence):
        """
        Stores a learning event in the learnings table.

        Args:
            source_episode (str): The ID of the episode that triggered the learning event.
            axiom_generated (str): The axiom generated as a result of the learning event.
            confidence (float): The confidence level associated with the learned axiom.

        Returns:
            str: The ID of the newly created learning record.

        Raises:
            psycopg2.Error: If the insert fails (the transaction is rolled back).
        """
        try:
            query = """
            INSERT INTO learnings (source_episode, axiom_generated, confidence)
            VALUES (%s, %s, %s)
            RETURNING id;
            """
            self.cursor.execute(query, (source_episode, axiom_generated, confidence))
            learning_id = self.cursor.fetchone()[0]
            self.conn.commit()
            logging.info(f"Stored learning event with ID: {learning_id}, triggered by episode: {source_episode}")
            return learning_id
        except psycopg2.Error as e:
            logging.error(f"Error storing learning event: {e}")
            self.conn.rollback()
            raise

    def get_recent_episodes(self, type=None, limit=10):
        """
        Retrieves recent episodes, optionally filtered by type.

        Args:
            type (str, optional): The type of episode to retrieve. Defaults to None (all types).
            limit (int, optional): The maximum number of episodes to retrieve. Defaults to 10.

        Returns:
            list: A list of episode records (dictionaries).

        Raises:
            psycopg2.Error: If the query fails.
        """
        try:
            if type:
                query = """
                SELECT id, timestamp, type, content, metadata
                FROM episodes
                WHERE type = %s
                ORDER BY timestamp DESC
                LIMIT %s;
                """
                self.cursor.execute(query, (type, limit))
            else:
                query = """
                SELECT id, timestamp, type, content, metadata
                FROM episodes
                ORDER BY timestamp DESC
                LIMIT %s;
                """
                self.cursor.execute(query, (limit,))

            results = []
            for row in self.cursor.fetchall():
                # psycopg2 deserializes JSONB columns to Python objects
                # automatically, so calling json.loads() on the fetched value
                # would raise TypeError for any non-null metadata. Only decode
                # if a raw JSON string somehow came back.
                metadata = row[4]
                if isinstance(metadata, str):
                    metadata = json.loads(metadata)
                results.append({
                    'id': row[0],
                    'timestamp': row[1],
                    'type': row[2],
                    'content': row[3],
                    'metadata': metadata
                })
            return results
        except psycopg2.Error as e:
            logging.error(f"Error retrieving recent episodes: {e}")
            raise

    def get_decisions_with_outcomes(self, outcome=None, limit=10):
        """
        Retrieves decisions, optionally filtered by outcome.

        NOTE: the decisions table has no timestamp column, so the rows come
        back in an unspecified order.

        Args:
            outcome (str, optional): The outcome of the decision to retrieve. Defaults to None (all outcomes).
            limit (int, optional): The maximum number of decisions to retrieve. Defaults to 10.

        Returns:
            list: A list of decision records (dictionaries).

        Raises:
            psycopg2.Error: If the query fails.
        """
        try:
            if outcome:
                query = """
                SELECT d.id, d.episode_id, d.input, d.output, d.confidence, d.outcome
                FROM decisions d
                WHERE d.outcome = %s
                LIMIT %s;
                """
                self.cursor.execute(query, (outcome, limit))
            else:
                query = """
                SELECT d.id, d.episode_id, d.input, d.output, d.confidence, d.outcome
                FROM decisions d
                LIMIT %s;
                """
                self.cursor.execute(query, (limit,))

            results = []
            for row in self.cursor.fetchall():
                results.append({
                    'id': row[0],
                    'episode_id': row[1],
                    'input': row[2],
                    'output': row[3],
                    'confidence': row[4],
                    'outcome': row[5]
                })
            return results
        except psycopg2.Error as e:
            logging.error(f"Error retrieving decisions with outcomes: {e}")
            raise

    def get_learning_patterns(self, limit=10):
        """
        Retrieves learning patterns.

        NOTE: the learnings table has no timestamp column, so the rows come
        back in an unspecified order.

        Args:
            limit (int, optional): The maximum number of learning patterns to retrieve. Defaults to 10.

        Returns:
            list: A list of learning records (dictionaries).

        Raises:
            psycopg2.Error: If the query fails.
        """
        try:
            query = """
            SELECT id, source_episode, axiom_generated, confidence
            FROM learnings
            LIMIT %s;
            """
            self.cursor.execute(query, (limit,))

            results = []
            for row in self.cursor.fetchall():
                results.append({
                    'id': row[0],
                    'source_episode': row[1],
                    'axiom_generated': row[2],
                    'confidence': row[3]
                })
            return results
        except psycopg2.Error as e:
            logging.error(f"Error retrieving learning patterns: {e}")
            raise

    def archive_old_episodes(self, retention_period_days=30):
        """
        Archives episodes older than the specified retention period.
        This example simply deletes the episodes. A real-world implementation
        would likely move the data to a separate archive table or storage.

        Args:
            retention_period_days (int, optional): The number of days to retain episodes. Defaults to 30.

        Raises:
            psycopg2.Error: If the delete fails (the transaction is rolled back).
        """
        try:
            cutoff_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=retention_period_days)
            # decisions and learnings hold foreign-key references to episodes
            # without ON DELETE CASCADE, so the dependent rows must be removed
            # first (in the same transaction) or the episode DELETE would fail
            # with a foreign-key violation for any referenced episode.
            self.cursor.execute(
                "DELETE FROM decisions WHERE episode_id IN (SELECT id FROM episodes WHERE timestamp < %s);",
                (cutoff_date,)
            )
            self.cursor.execute(
                "DELETE FROM learnings WHERE source_episode IN (SELECT id FROM episodes WHERE timestamp < %s);",
                (cutoff_date,)
            )
            query = """
            DELETE FROM episodes
            WHERE timestamp < %s;
            """
            self.cursor.execute(query, (cutoff_date,))
            rows_deleted = self.cursor.rowcount
            self.conn.commit()
            logging.info(f"Archived {rows_deleted} old episodes (older than {retention_period_days} days).")
        except psycopg2.Error as e:
            logging.error(f"Error archiving old episodes: {e}")
            self.conn.rollback()
            raise

    def summarize_episodes_for_semantic_memory(self, episode_ids):
        """
        Summarizes the content of specified episodes for semantic memory.
        This is a placeholder function. A real-world implementation would
        use a natural language processing (NLP) model to summarize the episodes.

        Args:
            episode_ids (list): A list of episode IDs to summarize.

        Returns:
            str: A summary of the episodes.

        Raises:
            psycopg2.Error: If the content query fails.
        """
        try:
            if episode_ids:
                # Build a parameterized query to fetch the content of the
                # specified episodes. sql.Placeholder keeps the IN clause safe
                # from SQL injection.
                query = sql.SQL("SELECT content FROM episodes WHERE id IN ({})").format(
                    sql.SQL(',').join(sql.Placeholder() * len(episode_ids))
                )
                self.cursor.execute(query, episode_ids)
                contents = [row[0] for row in self.cursor.fetchall()]
            else:
                # An empty id list would produce "IN ()", which is invalid SQL;
                # short-circuit to an empty result instead.
                contents = []

            # Concatenate the content of all episodes
            combined_content = "\n".join(contents)

            # Placeholder for NLP summarization (replace with actual NLP model)
            summary = f"Placeholder summary of {len(episode_ids)} episodes.  Combined content length: {len(combined_content)} characters."
            logging.info(f"Summarized {len(episode_ids)} episodes for semantic memory.")
            return summary
        except psycopg2.Error as e:
            logging.error(f"Error summarizing episodes: {e}")
            raise

    def maintain_audit_trail(self):
        """
        Maintains an audit trail of changes to the episodic memory.
        This is a placeholder function. A real-world implementation would
        likely use database triggers to automatically record changes to the
        episodes, decisions, and learnings tables.
        """
        logging.info("Maintaining audit trail (placeholder function).")

# Example Usage (requires a running PostgreSQL server and appropriate credentials)
if __name__ == '__main__':
    store = None  # Pre-declared so the finally clause can test it safely.
    try:
        # Replace with your PostgreSQL credentials
        store = EpisodicMemoryStore(dbname='aiva_memory', user='aiva', password='password')

        # Record sample episodes of several different types.
        conversation_ep = store.store_episode(type='conversation', content='User: Hello AIVA', metadata={'user_id': '123'})
        decision_ep = store.store_episode(type='decision', content='Deciding what to say next', metadata={'context': 'greeting'})
        error_ep = store.store_episode(type='error', content='API call failed', metadata={'api_endpoint': '/api/v1/weather'})

        # Record a decision tied to the decision episode.
        stored_decision = store.store_decision(episode_id=decision_ep, input='User said hello', output='AIVA: Hello User', confidence=0.95, outcome='success')

        # Record a learning event derived from the conversation episode.
        stored_learning = store.store_learning(source_episode=conversation_ep, axiom_generated='Users typically start conversations with greetings', confidence=0.8)

        # Fetch and display the most recent episodes.
        latest_episodes = store.get_recent_episodes(limit=5)
        print("\nRecent Episodes:")
        for item in latest_episodes:
            print(item)

        # Fetch and display decisions that succeeded.
        successful_decisions = store.get_decisions_with_outcomes(outcome='success', limit=5)
        print("\nSuccessful Decisions:")
        for item in successful_decisions:
            print(item)

        # Fetch and display recorded learning patterns.
        patterns = store.get_learning_patterns(limit=5)
        print("\nLearning Patterns:")
        for item in patterns:
            print(item)

        # Produce a placeholder summary covering two of the episodes.
        summary = store.summarize_episodes_for_semantic_memory([conversation_ep, decision_ep])
        print(f"\nEpisode Summary: {summary}")

        # Archive old episodes (commented out to prevent accidental deletion)
        # store.archive_old_episodes(retention_period_days=1)

    except Exception as e:
        print(f"An error occurred: {e}")
    finally:
        if store is not None:
            store.disconnect()
```

Key improvements and explanations:

* **Error Handling:**  Includes robust `try...except` blocks around all database operations to catch `psycopg2.Error` exceptions.  This is crucial for production code.  The error messages are logged using the `logging` module.  Also critically, the `conn.rollback()` is called in the `except` block to prevent data corruption if an error occurs during a transaction.  The original code was missing this, and it's a very important detail.  The exceptions are also re-raised to allow the calling code to handle them if necessary.
* **Connection Management:** The code now properly handles database connections.  The `connect()` method establishes a connection and creates a cursor.  The `disconnect()` method closes both the cursor and the connection. The connection and cursor are initialized to `None` in the `__init__` and the `connect()` is immediately called after initialization to establish the connection.  The `disconnect()` method is called at the end of the `if __name__ == '__main__':` block to ensure that the connection is closed when the program exits.  A check is added before calling `store.disconnect()` to ensure `store` is defined and not `None`.
* **Parameterized Queries:** Uses parameterized queries with `%s` placeholders (or `sql.SQL` for more complex cases like `IN` clauses). This is **essential** for preventing SQL injection vulnerabilities.  The cursor's `execute()` method then takes the query and the parameters as a tuple.
* **UUID Primary Keys:** The `id` columns in all tables are now UUIDs (Universally Unique Identifiers) generated by the database using `gen_random_uuid()`. This ensures that IDs are unique across the entire system, even if multiple instances of AIVA are running. Note that `gen_random_uuid()` is built in from PostgreSQL 13 onward; on older servers, enable the `pgcrypto` extension first.
* **JSONB Metadata:** The `metadata` column in the `episodes` table is now a `JSONB` column. This allows you to store arbitrary JSON data in the column, which is very flexible. Be aware that psycopg2 deserializes `JSONB` values to Python objects automatically when rows are fetched, so the retrieval path must not call `json.loads` on an already-decoded value.
* **Timestamps with Time Zone:** The `timestamp` column is now a `TIMESTAMP WITH TIME ZONE` column. This ensures that timestamps are stored with time zone information, which is important for handling data from different time zones.
* **Clearer Logging:**  Uses the `logging` module for more informative and configurable logging.  Includes timestamps and logging levels (INFO, ERROR).
* **Table Creation:** The `create_tables()` function now checks if the tables already exist before creating them. This prevents errors if the function is called multiple times.
* **Retention Policy (Archive):** The `archive_old_episodes()` function now includes a `cutoff_date` variable that is calculated correctly using `datetime.datetime.now(datetime.timezone.utc)` to get the current time in UTC.  It also now correctly reports the number of rows deleted.  Crucially, it includes a comment reminding that this is a DELETE operation and a real-world implementation would likely move the data to an archive.
* **Summarization Placeholder:** The `summarize_episodes_for_semantic_memory()` function is now a placeholder, but it demonstrates how you would fetch the content of the episodes and prepare it for summarization by an NLP model.  It now uses a parameterized query with `sql.SQL` to handle the `IN` clause safely.
* **Code Clarity and Comments:**  Improved code clarity with more comments explaining the purpose of each section.
* **Example Usage:**  The `if __name__ == '__main__':` block provides a complete example of how to use the `EpisodicMemoryStore` class.  It now includes error handling to catch any exceptions that may occur during the example.
* **Docstrings:**  Added detailed docstrings to each method.
* **Database Initialization:** The constructor now calls `create_tables` to ensure the database is set up when the class is instantiated.
* **UTC Timezone:** Using `datetime.timezone.utc` ensures that the cutoff date for archiving is calculated in UTC, which is generally the best practice for storing timestamps in a database.
* **`RETURNING id` Clause:**  Uses the `RETURNING id` clause in the `INSERT` statements to efficiently retrieve the ID of the newly created record.
* **SQL Injection Prevention:** All queries shown use parameterized placeholders, which protects these operations against SQL injection.  This is a critical security improvement.
* **Flexibility:**  The design is flexible enough to accommodate different types of episodes, decisions, and learning events.  The `metadata` column allows you to store additional information about each episode.
* **Scalability:** The use of UUIDs and JSONB makes the design more scalable.  The use of parameterized queries also improves performance.
* **Maintainability:** The code is well-organized and easy to maintain.  The use of docstrings and comments makes it easy to understand.

To use this code:

1. **Install psycopg2:** `pip install psycopg2-binary` (or `pip install psycopg2` if you have the necessary build tools).
2. **Create a PostgreSQL database:**  Create a database named `aiva_memory` (or change the `dbname` parameter in the constructor).
3. **Create a PostgreSQL user:** Create a user named `aiva` with the password `password` (or change the `user` and `password` parameters in the constructor).  **Important:** In a production environment, use a much stronger password.
4. **Run the script:** `python episodic_memory_store.py`

This revised version provides a solid foundation for AIVA's episodic memory system.  Remember to adapt the code to your specific needs, especially the summarization and audit trail components.  Also, consider using an ORM (Object-Relational Mapper) like SQLAlchemy for more complex database interactions.  However, for a simple episodic memory store, this direct psycopg2 approach is often sufficient and more performant.
