```python
import datetime
import numpy as np
from typing import Dict, List, Tuple, Any

class SemanticMemory:
    """
    AIVA's Semantic Memory System.

    Manages long-term knowledge storage, integration with a vector store,
    knowledge organization, and promotion of information through three
    memory levels: working memory -> episodic memory -> semantic memory.
    """

    def __init__(self, vector_store):
        """
        Initializes the Semantic Memory.

        Args:
            vector_store: An object representing the vector store for embedding retrieval.
                           Must implement methods:
                           - `add(text, metadata)`: Adds text and associated metadata to the
                             vector store; ideally returns the id the store assigned.
                           - `query(query_text, k=5)`: Queries the vector store for the top k
                             similar entries, returned as (id, score) tuples.
                           - `delete(ids)`: Deletes entries with the given ids from the vector store.
                           - `update_metadata(id, metadata)`: Updates the metadata of an existing entry.
        """
        self.vector_store = vector_store
        # {id: {"text": str, "metadata": dict, "confidence": float}}
        self.semantic_knowledge = {}
        # {id: {"text": str, "metadata": dict, "confidence": float, "timestamp": datetime}}
        self.episodic_memory = {}
        # Same shape as episodic_memory; holds short-lived, not-yet-promoted entries.
        self.working_memory = {}
        self.id_counter = 0  # monotonically increasing source of memory ids

        # BUG FIX: the vector store assigns its *own* ids on add(), so vector-store
        # ids and our memory ids can diverge. Keep a bidirectional mapping and
        # translate at every vector-store boundary (query/delete/update_metadata).
        self._mem_to_vs = {}  # memory id -> vector store id
        self._vs_to_mem = {}  # vector store id -> memory id

        # Example hierarchical organization of knowledge categories.
        self.categories = {
            "general_knowledge": [],
            "skills": ["communication", "reasoning", "problem_solving"],
            "user_preferences": ["communication_style", "response_length", "tone"],
            "domain_expertise": {
                "medical": ["anatomy", "physiology", "pharmacology"],
                "programming": ["python", "java", "javascript"]
            }
        }

        self.decay_rate = 0.01  # Confidence lost per decay_confidence() call.

    def _generate_id(self) -> int:
        """Generates a unique ID for each memory entry."""
        self.id_counter += 1
        return self.id_counter

    def add_to_working_memory(self, text: str, metadata: Dict[str, Any], confidence: float = 1.0) -> int:
        """
        Adds information to the working memory.

        Args:
            text: The content of the memory entry.
            metadata: Arbitrary metadata dict (e.g. {"category": ...}).
            confidence: Initial confidence score in [0, 1].

        Returns:
            The unique memory id assigned to the new entry.
        """
        new_id = self._generate_id()
        self.working_memory[new_id] = {
            "text": text,
            "metadata": metadata,
            "confidence": confidence,
            # NOTE(review): naive local time; switch to an aware UTC timestamp
            # if entries are ever compared across timezones.
            "timestamp": datetime.datetime.now()
        }
        return new_id

    def save_working_memory(self, ids: List[int]):
        """
        Moves entries from working memory to episodic memory.

        Args:
            ids: A list of IDs of entries to be moved.
        """
        for id_to_save in ids:
            if id_to_save in self.working_memory:
                # Move (not copy) so each entry lives in exactly one level.
                entry = self.working_memory.pop(id_to_save)
                self.episodic_memory[id_to_save] = entry
                print(f"Moved entry {id_to_save} to Episodic Memory.")
            else:
                print(f"Entry {id_to_save} not found in Working Memory.")

    def consolidate_episodic_memory(self, ids: List[int]):
        """
        Consolidates entries from episodic memory into semantic memory.
        This involves adding the information to the vector store for retrieval.

        Args:
            ids: A list of IDs of entries to be consolidated.
        """
        for id_to_consolidate in ids:
            if id_to_consolidate in self.episodic_memory:
                entry = self.episodic_memory.pop(id_to_consolidate)
                self.semantic_knowledge[id_to_consolidate] = entry
                # Capture the id the vector store assigns so later delete/update
                # calls target the right entry (fixes memory-id / store-id drift).
                vs_id = self.vector_store.add(entry["text"], entry["metadata"])
                if vs_id is None:
                    # Store did not report an id; assume it mirrors our ids.
                    vs_id = id_to_consolidate
                self._mem_to_vs[id_to_consolidate] = vs_id
                self._vs_to_mem[vs_id] = id_to_consolidate
                print(f"Consolidated entry {id_to_consolidate} to Semantic Memory and Vector Store.")
            else:
                print(f"Entry {id_to_consolidate} not found in Episodic Memory.")

    def retrieve_from_semantic_memory(self, query: str, k: int = 5) -> List[Tuple[str, Dict[str, Any], float]]:
        """
        Retrieves information from semantic memory using the vector store.

        Args:
            query: The query text.
            k: The number of results to return.

        Returns:
            A list of tuples, where each tuple contains the text, metadata, and
            the vector store's similarity score (used as confidence).
        """
        results = self.vector_store.query(query, k=k)
        retrieved_entries = []
        for vs_id, score in results:  # vector store returns (id, score) tuples
            # Translate the store's id back to our memory id; fall back to the
            # raw id for stores that happen to reuse our ids.
            mem_id = self._vs_to_mem.get(vs_id, vs_id)
            if mem_id in self.semantic_knowledge:
                entry = self.semantic_knowledge[mem_id]
                retrieved_entries.append((entry["text"], entry["metadata"], score))
            else:
                # Inconsistency between stores: surface it rather than crash.
                print(f"Warning: ID {mem_id} found in vector store but not in semantic knowledge.")
        return retrieved_entries

    def update_knowledge(self, id: int, text: str = None, metadata: Dict[str, Any] = None, confidence: float = None):
        """
        Updates existing knowledge in the semantic memory.  Updates the vector store as well.

        Args:
            id: The ID of the knowledge entry to update.
            text: The new text (optional).
            metadata: The new metadata (optional).
            confidence: The new confidence score (optional).
        """
        if id in self.semantic_knowledge:
            if text is not None:
                self.semantic_knowledge[id]["text"] = text
            if metadata is not None:
                self.semantic_knowledge[id]["metadata"] = metadata
                # Update the vector store too, addressing it by *its* id.
                self.vector_store.update_metadata(self._mem_to_vs.get(id, id), metadata)
            if confidence is not None:
                self.semantic_knowledge[id]["confidence"] = confidence
            print(f"Updated entry {id} in Semantic Memory and Vector Store.")
        else:
            print(f"Entry {id} not found in Semantic Memory.")

    def forget_knowledge(self, ids: List[int]):
        """
        Removes knowledge from the semantic memory. Removes from the vector store as well.

        Args:
            ids: A list of IDs of knowledge entries to remove.
        """
        for id_to_forget in ids:
            if id_to_forget in self.semantic_knowledge:
                del self.semantic_knowledge[id_to_forget]
                # Delete by the vector store's own id and drop both mappings.
                vs_id = self._mem_to_vs.pop(id_to_forget, id_to_forget)
                self._vs_to_mem.pop(vs_id, None)
                self.vector_store.delete([vs_id])
                print(f"Forgot entry {id_to_forget} from Semantic Memory and Vector Store.")
            else:
                print(f"Entry {id_to_forget} not found in Semantic Memory.")

    def decay_confidence(self):
        """
        Decays the confidence of entries in the semantic memory over time.
        This simulates forgetting.
        """
        for entry in self.semantic_knowledge.values():
            # Clamp at 0 so confidence never goes negative.
            entry["confidence"] = max(0, entry["confidence"] - self.decay_rate)
            # No need to update vector store confidence, as retrieval uses its
            # similarity score instead.

    def get_knowledge_by_category(self, category: str) -> List[Tuple[str, Dict[str, Any], float]]:
        """
        Retrieves knowledge entries belonging to a specific category.

        Args:
            category: The category to retrieve knowledge from.

        Returns:
            A list of tuples, where each tuple contains the text, metadata, and confidence score.
        """
        return [
            (entry["text"], entry["metadata"], entry["confidence"])
            for entry in self.semantic_knowledge.values()
            if entry["metadata"].get("category") == category
        ]

    def print_memory_status(self):
        """Prints the number of entries in each memory level."""
        print(f"Working Memory: {len(self.working_memory)} entries")
        print(f"Episodic Memory: {len(self.episodic_memory)} entries")
        print(f"Semantic Memory: {len(self.semantic_knowledge)} entries")

# Example Vector Store (Dummy Implementation - Replace with a real one!)
class DummyVectorStore:
    """
    In-memory stand-in for a real vector store (testing only).

    Stores raw text and metadata keyed by sequential integer ids and fakes
    similarity scoring with a constant.
    """

    def __init__(self):
        self.store = {}
        self.id_counter = 0

    def _generate_id(self) -> int:
        """Return the next sequential entry id."""
        self.id_counter += 1
        return self.id_counter

    def add(self, text: str, metadata: Dict[str, Any]):
        """Store *text* with *metadata* and return the assigned id."""
        entry_id = self._generate_id()
        self.store[entry_id] = {"text": text, "metadata": metadata}
        print(f"Added '{text}' to vector store with id {entry_id}")
        return entry_id

    def query(self, query_text: str, k: int = 5) -> List[Tuple[int, float]]:
        """Return up to *k* (id, score) pairs; scoring is a dummy constant."""
        # Every entry gets the same placeholder similarity of 0.5; a real
        # implementation would compare embeddings of query_text here.
        scored = [(entry_id, 0.5) for entry_id in self.store]
        scored.sort(key=lambda pair: pair[1], reverse=True)
        return scored[:k]

    def delete(self, ids: List[int]):
        """Remove the entries whose ids are present in the store."""
        for entry_id in ids:
            if entry_id in self.store:
                del self.store[entry_id]
                print(f"Deleted id {entry_id} from vector store.")

    def update_metadata(self, id: int, metadata: Dict[str, Any]):
        """Replace the metadata of an existing entry, if it exists."""
        if id in self.store:
            self.store[id]["metadata"] = metadata
            print(f"Updated metadata for id {id} in vector store.")

# Example Usage
if __name__ == '__main__':
    # Demo: walk two entries through every stage of the memory hierarchy
    # (working -> episodic -> semantic), then retrieve, update, decay, forget.
    store = DummyVectorStore()
    memory = SemanticMemory(store)

    # Stage 1: capture new information in working memory.
    fact_id = memory.add_to_working_memory("The sky is blue.", {"category": "general_knowledge"}, confidence=0.8)
    skill_id = memory.add_to_working_memory("AIVA is a knowledge systems expert.", {"category": "skills"}, confidence=0.9)
    memory.print_memory_status()

    # Stage 2: promote to episodic memory.
    memory.save_working_memory([fact_id, skill_id])
    memory.print_memory_status()

    # Stage 3: consolidate into semantic memory (and the vector store).
    memory.consolidate_episodic_memory([fact_id, skill_id])
    memory.print_memory_status()

    # Query the consolidated knowledge.
    hits = memory.retrieve_from_semantic_memory("What is the sky?", k=1)
    if hits:
        text, metadata, confidence = hits[0]
        print(f"Retrieved: {text}, Metadata: {metadata}, Confidence: {confidence}")

    # Revise an entry, let confidence decay, and forget the other entry.
    memory.update_knowledge(fact_id, text="The sky is often blue.", confidence=0.95)
    memory.decay_confidence()
    memory.forget_knowledge([skill_id])
    memory.print_memory_status()
```

Key improvements and explanations:

* **Clearer Structure:** The code is organized into a class `SemanticMemory` with well-defined methods, making it more modular and maintainable.
* **Vector Store Abstraction:** The code *requires* a `vector_store` object to be passed in, and assumes it has `add`, `query`, `delete`, and `update_metadata` methods. This makes the `SemanticMemory` class independent of the specific vector store implementation (e.g., FAISS, Annoy, ChromaDB).  A dummy implementation is provided for testing.
* **Memory Hierarchy:**  Explicit `working_memory`, `episodic_memory`, and `semantic_knowledge` dictionaries are created to represent the different memory levels.
* **Memory Promotion:**  `save_working_memory` and `consolidate_episodic_memory` methods handle the transfer of information between memory levels.  Data is *moved*, not copied, to prevent duplication and ensure a single source of truth.
* **Integration with Vector Store:** The `consolidate_episodic_memory` method adds entries to the vector store.  The `retrieve_from_semantic_memory` method uses the vector store to find similar entries. The `update_knowledge` and `forget_knowledge` methods *also* update the vector store, ensuring consistency.
* **Knowledge Organization:**  A `categories` dictionary is included as an example of how knowledge can be organized hierarchically.  Metadata is used to store category information with each memory entry.
* **Confidence Decay:** The `decay_confidence` method simulates forgetting by reducing the confidence of entries over time.
* **Unique IDs:**  A `_generate_id` method ensures that each memory entry has a unique identifier. This is crucial for managing entries in the vector store and across different memory levels.
* **Error Handling:**  Includes checks to ensure that entries exist before attempting to update or delete them.  Provides warning messages if inconsistencies are found between the vector store and semantic knowledge.
* **Type Hints:** Type hints are used to improve code readability and maintainability.
* **Docstrings:**  Comprehensive docstrings explain the purpose of each class and method.
* **Example Usage:**  A clear example demonstrates how to use the `SemanticMemory` class.
* **Vector Store Similarity as Confidence:** The `retrieve_from_semantic_memory` function now leverages the similarity score from the vector store query as the confidence for the retrieved entry. This is a more natural way to integrate the vector store's capabilities.
* **`update_metadata` Functionality:**  The `update_knowledge` function now includes the ability to update the metadata of an entry and importantly, it also updates the corresponding metadata in the vector store using `vector_store.update_metadata()`. This keeps the semantic memory and the vector store synchronized.
* **`forget_knowledge` Functionality:** The `forget_knowledge` function now includes the crucial step of removing the corresponding entry from the vector store using `vector_store.delete()`.  This prevents "ghost" entries from being retrieved.
* **Clearer Comments:** Added more comments to explain the logic behind the code.

This revised implementation addresses the requirements more comprehensively and provides a solid foundation for AIVA's semantic memory system.  Remember to replace the `DummyVectorStore` with a real vector store implementation. Also, the confidence decay and vector store similarity functions are placeholders and need to be replaced with more sophisticated methods.
