```python
import chromadb
from chromadb.config import Settings as ChromaSettings
import qdrant_client
from qdrant_client import QdrantClient, models as qmodels
import faiss
import numpy as np
import os
import json
from typing import List, Dict, Any, Optional, Union

class VectorStoreManager:
    """
    Manages multiple vector databases with different backends.
    Supports ChromaDB, Qdrant, and FAISS.
    """

    def __init__(self, default_backend: str = "chroma", chroma_path: str = "./chroma_db",
                 qdrant_host: str = "localhost", qdrant_port: int = 6333,
                 faiss_index_path: str = "./faiss_index.faiss",
                 backup_dir: str = "./backups"):
        """
        Initializes the VectorStoreManager.

        Args:
            default_backend: The default vector database backend to use ("chroma", "qdrant", "faiss").
            chroma_path: Path to the ChromaDB database.
            qdrant_host: Hostname for the Qdrant database.
            qdrant_port: Port for the Qdrant database.
            faiss_index_path: Path to save the FAISS index.
            backup_dir: Directory to store backups.
        """
        self.default_backend = default_backend
        self.chroma_path = chroma_path
        self.qdrant_host = qdrant_host
        self.qdrant_port = qdrant_port
        self.faiss_index_path = faiss_index_path
        self.backup_dir = backup_dir

        self.chroma_client = None
        self.qdrant_client = None
        self.faiss_index = None  # Initialize faiss_index to None
        self.faiss_id_map = {}  # Map IDs to FAISS index positions. Useful for deletion.
        self.faiss_next_id = 0  # Counter to assign unique IDs to FAISS vectors

        # Initialize clients if needed (but defer until a collection is used)
        self.initialized_backends = set()  # Track initialized backends

        # Define collections
        self.collections = {
            "patents": None,
            "knowledge": None,
            "axioms": None,
            "skills": None,
            "conversations": None,
        }

        # Create backup directory if it doesn't exist
        os.makedirs(self.backup_dir, exist_ok=True)

    def _initialize_chroma(self):
        """Initializes the ChromaDB client."""
        if "chroma" not in self.initialized_backends:
            self.chroma_client = chromadb.Client(ChromaSettings(persist_directory=self.chroma_path,
                                                                chroma_db_impl="duckdb+parquet",
                                                                anonymized_telemetry=False))
            self.initialized_backends.add("chroma")

    def _initialize_qdrant(self):
        """Initializes the Qdrant client."""
        if "qdrant" not in self.initialized_backends:
            self.qdrant_client = QdrantClient(host=self.qdrant_host, port=self.qdrant_port)
            self.initialized_backends.add("qdrant")

    def _initialize_faiss(self, dimension: int = 1536):  # Default dimension for embeddings
        """Initializes the FAISS index."""
        if "faiss" not in self.initialized_backends:
            # Check if a FAISS index already exists
            if os.path.exists(self.faiss_index_path):
                self.faiss_index = faiss.read_index(self.faiss_index_path)
                # Load existing ID map
                try:
                    with open(f"{os.path.splitext(self.faiss_index_path)[0]}.id_map.json", "r") as f:
                        self.faiss_id_map = json.load(f)
                    self.faiss_next_id = max(self.faiss_id_map.keys(), default=0) + 1 # Find the next available ID
                    self.faiss_id_map = {int(k): v for k, v in self.faiss_id_map.items()}  # Ensure keys are integers

                except FileNotFoundError:
                     self.faiss_id_map = {}
                     self.faiss_next_id = 0

            else:
                self.faiss_index = faiss.IndexFlatL2(dimension)  # L2 distance for similarity
                self.faiss_id_map = {}
                self.faiss_next_id = 0

            self.initialized_backends.add("faiss")

    def create_collection(self, collection_name: str, backend: Optional[str] = None, dimension: int = 1536):
        """
        Creates a new collection in the specified backend.

        Args:
            collection_name: The name of the collection to create.
            backend: The vector database backend to use ("chroma", "qdrant", "faiss").
                     If None, uses the default backend.
            dimension: The dimensionality of the vectors (for FAISS and Qdrant).
        """
        backend = backend or self.default_backend

        if collection_name not in self.collections:
            self.collections[collection_name] = backend
        else:
            print(f"Collection '{collection_name}' already exists.")
            return

        if backend == "chroma":
            self._initialize_chroma()
            self.chroma_client.create_collection(name=collection_name)
        elif backend == "qdrant":
            self._initialize_qdrant()
            self.qdrant_client.recreate_collection(
                collection_name=collection_name,
                vectors_config=qmodels.VectorParams(size=dimension, distance=qmodels.Distance.COSINE)
            )
        elif backend == "faiss":
            self._initialize_faiss(dimension)
            # FAISS doesn't have the concept of named collections.  We'll just use one index.
            pass
        else:
            raise ValueError(f"Unsupported backend: {backend}")

    def delete_collection(self, collection_name: str):
        """
        Deletes a collection from the specified backend.

        Args:
            collection_name: The name of the collection to delete.
        """
        backend = self.collections.get(collection_name)
        if not backend:
            print(f"Collection '{collection_name}' does not exist.")
            return

        if backend == "chroma":
            self._initialize_chroma()
            self.chroma_client.delete_collection(name=collection_name)
        elif backend == "qdrant":
            self._initialize_qdrant()
            self.qdrant_client.delete_collection(collection_name=collection_name)
        elif backend == "faiss":
            # FAISS doesn't really have collections.  We could clear the index, but that would affect everything.
            print("Warning: Deleting a FAISS collection is not fully supported. This will not clear the index.")
            pass # or implement a full reset if desired.
        else:
            raise ValueError(f"Unsupported backend: {backend}")

        del self.collections[collection_name]

    def list_collections(self) -> List[str]:
        """
        Lists all existing collections.

        Returns:
            A list of collection names.
        """
        return list(self.collections.keys())

    def _get_backend_and_client(self, collection_name: str) -> tuple[str, Any]:
        """Helper function to get the backend and client for a collection."""
        backend = self.collections.get(collection_name)
        if not backend:
            raise ValueError(f"Collection '{collection_name}' does not exist.")

        if backend == "chroma":
            self._initialize_chroma()
            client = self.chroma_client
        elif backend == "qdrant":
            self._initialize_qdrant()
            client = self.qdrant_client
        elif backend == "faiss":
            self._initialize_faiss()
            client = self.faiss_index # We return the index itself as the "client"
        else:
            raise ValueError(f"Unsupported backend: {backend}")
        return backend, client


    def add_vectors(self, collection_name: str, vectors: List[List[float]], ids: List[str], metadatas: Optional[List[Dict[str, Any]]] = None, texts: Optional[List[str]] = None):
        """
        Adds vectors to the specified collection.

        Args:
            collection_name: The name of the collection to add vectors to.
            vectors: A list of vectors to add.
            ids: A list of unique IDs for the vectors.
            metadatas: Optional metadata associated with each vector.
            texts: Optional text associated with each vector.
        """
        backend, client = self._get_backend_and_client(collection_name)

        if backend == "chroma":
            collection = client.get_collection(name=collection_name)
            collection.add(
                embeddings=vectors,
                ids=ids,
                metadatas=metadatas,
                documents=texts
            )
        elif backend == "qdrant":
            points = [
                qmodels.PointStruct(
                    id=id,
                    vector=vector,
                    payload=metadata
                ) for id, vector, metadata in zip(ids, vectors, metadatas or [{} for _ in vectors])
            ]
            client.upsert(collection_name=collection_name, points=points, wait=True)
        elif backend == "faiss":
            # FAISS requires vectors to be added in a batch.
            # We need to convert vectors to a NumPy array.
            vectors_np = np.array(vectors).astype('float32')

            # Assign unique IDs to the vectors
            faiss_ids = list(range(self.faiss_next_id, self.faiss_next_id + len(vectors)))
            self.faiss_next_id += len(vectors)

            # Add the vectors to the FAISS index
            client.add(vectors_np)

            # Update the ID map to track the mapping between vector IDs and FAISS index positions
            for id, faiss_id in zip(ids, faiss_ids):
                self.faiss_id_map[faiss_id] = id  # faiss_id is the index in the faiss_index

        else:
            raise ValueError(f"Unsupported backend: {backend}")


    def get_vectors(self, collection_name: str, ids: List[str]) -> List[Dict[str, Any]]:
        """
        Retrieves vectors from the specified collection by their IDs.

        Args:
            collection_name: The name of the collection to retrieve vectors from.
            ids: A list of IDs of the vectors to retrieve.

        Returns:
            A list of dictionaries, where each dictionary contains the vector data (embedding, metadata, etc.).
        """
        backend, client = self._get_backend_and_client(collection_name)

        if backend == "chroma":
            collection = client.get_collection(name=collection_name)
            results = collection.get(ids=ids, include=["embeddings", "metadatas", "documents"])
            # Structure the results to be consistent across backends
            retrieved_vectors = []
            for i in range(len(results['ids'])):
                vector_data = {
                    'id': results['ids'][i],
                    'embedding': results['embeddings'][i],
                    'metadata': results['metadatas'][i] if results['metadatas'] else {},
                    'text': results['documents'][i] if results['documents'] else None
                }
                retrieved_vectors.append(vector_data)
            return retrieved_vectors

        elif backend == "qdrant":
            search_result = client.retrieve(
                collection_name=collection_name,
                ids=ids,
                with_payload=True,
                with_vectors=True
            )
            retrieved_vectors = []
            for point in search_result:
                vector_data = {
                    'id': point.id,
                    'embedding': point.vector,
                    'metadata': point.payload if point.payload else {},
                    'text': None  # Qdrant doesn't have a dedicated "text" field
                }
                retrieved_vectors.append(vector_data)
            return retrieved_vectors

        elif backend == "faiss":
            # FAISS only allows retrieval by nearest neighbor search.
            raise NotImplementedError("Direct retrieval by ID is not supported for FAISS.  Use a search to find the vectors.")
        else:
            raise ValueError(f"Unsupported backend: {backend}")

    def update_vectors(self, collection_name: str, ids: List[str], vectors: Optional[List[List[float]]] = None, metadatas: Optional[List[Dict[str, Any]]] = None, texts: Optional[List[str]] = None):
        """
        Updates vectors in the specified collection.

        Args:
            collection_name: The name of the collection to update vectors in.
            ids: A list of IDs of the vectors to update.
            vectors: Optional new vectors to replace the existing ones.
            metadatas: Optional new metadata to replace the existing metadata.
            texts: Optional new text to replace the existing text.
        """
        backend, client = self._get_backend_and_client(collection_name)

        if backend == "chroma":
            collection = client.get_collection(name=collection_name)
            collection.update(
                ids=ids,
                embeddings=vectors,
                metadatas=metadatas,
                documents=texts
            )
        elif backend == "qdrant":
            points = []
            for i, id in enumerate(ids):
                updates = {}
                if vectors:
                    updates["vector"] = vectors[i]
                if metadatas:
                    updates["payload"] = metadatas[i]

                if updates:
                    point = qmodels.PointStruct(id=id, **updates)
                    points.append(point)

            if points:
                client.upsert(collection_name=collection_name, points=points, wait=True)

        elif backend == "faiss":
            # FAISS doesn't support direct updates. You would need to delete and re-add the vectors.
            raise NotImplementedError("Updates are not directly supported for FAISS. Delete and re-add the vectors.")
        else:
            raise ValueError(f"Unsupported backend: {backend}")


    def delete_vectors(self, collection_name: str, ids: List[str]):
        """
        Deletes vectors from the specified collection by their IDs.

        Args:
            collection_name: The name of the collection to delete vectors from.
            ids: A list of IDs of the vectors to delete.
        """
        backend, client = self._get_backend_and_client(collection_name)

        if backend == "chroma":
            collection = client.get_collection(name=collection_name)
            collection.delete(ids=ids)
        elif backend == "qdrant":
            client.delete(collection_name=collection_name, points_selector=qmodels.PointIdsList(points=ids), wait=True)
        elif backend == "faiss":
            # FAISS doesn't support direct deletion by ID.  We need to remap IDs to positions in the index,
            # and then remove those positions. Since FAISS does not support removing individual vectors
            # after they are added, the best approach is to rebuild the index.

            # Find the FAISS index positions corresponding to the given IDs
            faiss_positions_to_delete = []
            for faiss_id, original_id in self.faiss_id_map.items():
                if original_id in ids:
                    faiss_positions_to_delete.append(faiss_id)

            # If no vectors to delete, return early
            if not faiss_positions_to_delete:
                return

            # Create a new index
            dimension = self.faiss_index.d  # Get the dimension from the existing index
            new_index = faiss.IndexFlatL2(dimension)

            # Create a new ID map
            new_faiss_id_map = {}
            new_faiss_next_id = 0

            # Iterate through the existing vectors and re-add the ones that are not being deleted
            nb = self.faiss_index.ntotal  # total number of vectors in the index
            all_vectors = self.faiss_index.reconstruct_n(0, nb) # reconstruct all vectors from the index

            # re-populate new_index with vectors that are not being deleted
            for faiss_id in range(nb):
                if faiss_id not in faiss_positions_to_delete:
                    vector = all_vectors[faiss_id]
                    vector = vector.reshape(1, -1).astype('float32')

                    new_index.add(vector)

                    # Find the original ID associated with the FAISS position
                    original_id = self.faiss_id_map[faiss_id]

                    # Update the ID map to track the mapping between vector IDs and new FAISS index positions
                    new_faiss_id_map[new_faiss_next_id] = original_id
                    new_faiss_next_id += 1

            # Replace the old index and ID map with the new ones
            self.faiss_index = new_index
            self.faiss_id_map = new_faiss_id_map
            self.faiss_next_id = new_faiss_next_id

            # Rebuild the index (necessary after removing vectors in some FAISS implementations)
            self.faiss_index.reset()
            self._save_faiss_index() # Save the rebuilt index

        else:
            raise ValueError(f"Unsupported backend: {backend}")


    def search(self, collection_name: str, query_vector: List[float], k: int = 10,
               filter: Optional[Dict[str, Any]] = None, query_text: Optional[str] = None) -> List[Dict[str, Any]]:
        """
        Searches for the nearest neighbors of a query vector in the specified collection.

        Args:
            collection_name: The name of the collection to search in.
            query_vector: The query vector.
            k: The number of nearest neighbors to retrieve.
            filter: Optional metadata filter.
            query_text: Optional keyword query (for hybrid search).

        Returns:
            A list of dictionaries, where each dictionary contains the vector data (embedding, metadata, etc.)
            of the nearest neighbors.
        """
        backend, client = self._get_backend_and_client(collection_name)

        if backend == "chroma":
            results = client.get_collection(name=collection_name).query(
                query_embeddings=[query_vector],
                n_results=k,
                where=filter,
                query_texts=[query_text] if query_text else None,
                include=["embeddings", "metadatas", "documents"]
            )

            # Structure the results to be consistent across backends
            retrieved_vectors = []
            for i in range(len(results['ids'])):
                vector_data = {
                    'id': results['ids'][i],
                    'embedding': results['embeddings'][i],
                    'metadata': results['metadatas'][i] if results['metadatas'] else {},
                    'text': results['documents'][i] if results['documents'] else None,
                    'distance': results['distances'][i] if results['distances'] else None
                }
                retrieved_vectors.append(vector_data)
            return retrieved_vectors

        elif backend == "qdrant":

            search_result = client.search(
                collection_name=collection_name,
                query_vector=query_vector,
                query_filter=self._convert_filter_to_qdrant(filter) if filter else None,
                limit=k,
                with_payload=True,
                with_vectors=True
            )

            retrieved_vectors = []
            for point in search_result:
                vector_data = {
                    'id': point.id,
                    'embedding': point.vector,
                    'metadata': point.payload if point.payload else {},
                    'text': None,  # Qdrant doesn't have a dedicated "text" field
                    'score': point.score # Similarity score
                }
                retrieved_vectors.append(vector_data)
            return retrieved_vectors


        elif backend == "faiss":
            # FAISS search
            query_vector_np = np.array([query_vector]).astype('float32')
            distances, indices = client.search(query_vector_np, k)

            retrieved_vectors = []
            for i, index in enumerate(indices[0]):
                if index == -1: # Handle cases where fewer than k vectors are available
                    continue

                faiss_id = index
                original_id = self.faiss_id_map.get(faiss_id)

                if original_id is None:
                    continue  # Skip if original ID is not found

                vector_data = {
                    'id': original_id,
                    'embedding': query_vector,  # FAISS doesn't return the original vectors
                    'metadata': {},  # FAISS doesn't store metadata
                    'text': None,
                    'distance': distances[0][i]
                }
                retrieved_vectors.append(vector_data)
            return retrieved_vectors
        else:
            raise ValueError(f"Unsupported backend: {backend}")


    def _convert_filter_to_qdrant(self, filter: Dict[str, Any]) -> qmodels.Filter:
        """
        Converts a generic filter dictionary to a Qdrant Filter object.  This is a basic example.
        More complex logic may be needed for different filter types.

        Args:
            filter: A dictionary representing the filter.  Example: {"key": "value"}

        Returns:
            A Qdrant Filter object.
        """
        conditions = []
        for key, value in filter.items():
            conditions.append(
                qmodels.FieldCondition(
                    key=key,
                    match=qmodels.MatchValue(value=value)
                )
            )
        return qmodels.Filter(must=conditions)

    def backup(self, collection_name: str):
        """
        Backs up a collection to the backup directory.

        Args:
            collection_name: The name of the collection to backup.
        """
        backend, client = self._get_backend_and_client(collection_name)
        backup_path = os.path.join(self.backup_dir, f"{collection_name}_{backend}")

        if backend == "chroma":
            # ChromaDB can be backed up by simply copying the directory
            import shutil
            source_path = self.chroma_path
            destination_path = backup_path

            try:
                shutil.copytree(source_path, destination_path, dirs_exist_ok=True)
                print(f"ChromaDB backup created at: {destination_path}")
            except Exception as e:
                print(f"Error creating ChromaDB backup: {e}")

        elif backend == "qdrant":
            # Qdrant backup using the client API
            try:
                client.create_backup(collection_name=collection_name, backup_location=backup_path)
                print(f"Qdrant backup created at: {backup_path}")
            except Exception as e:
                print(f"Error creating Qdrant backup: {e}")
        elif backend == "faiss":
            # FAISS backup involves saving the index and the ID map
            self._save_faiss_index()
            print(f"FAISS backup created for index at: {self.faiss_index_path} and id_map at: {os.path.splitext(self.faiss_index_path)[0]}.id_map.json")
        else:
            raise ValueError(f"Unsupported backend: {backend}")

    def _save_faiss_index(self):
        """Saves the FAISS index and the ID map to disk."""
        if self.faiss_index is not None:
             faiss.write_index(self.faiss_index, self.faiss_index_path)
             # Save the ID map to a JSON file
             with open(f"{os.path.splitext(self.faiss_index_path)[0]}.id_map.json", "w") as f:
                json.dump(self.faiss_id_map, f)
        else:
            print("FAISS index is None. No index to save.")

    def restore(self, collection_name: str):
        """
        Restores a collection from the backup directory.  This is a placeholder, as
        restoring depends heavily on the specific database backend.  Manual intervention
        is often needed.

        Args:
            collection_name: The name of the collection to restore.
        """
        backend, client = self._get_backend_and_client(collection_name)
        backup_path = os.path.join(self.backup_dir, f"{collection_name}_{backend}")

        if backend == "chroma":
            print(f"To restore ChromaDB, manually copy the backed up directory '{backup_path}' to '{self.chroma_path}'.")
        elif backend == "qdrant":
            print(f"To restore Qdrant, use the Qdrant API or UI to restore from the backup at '{backup_path}'.")
        elif backend == "faiss":
            print(f"To restore FAISS, the index and ID map will be loaded automatically when the collection is next used, if they exist in the specified paths.")
        else:
            raise ValueError(f"Unsupported backend: {backend}")


# Example Usage:
if __name__ == "__main__":
    # Initialize the VectorStoreManager
    manager = VectorStoreManager(default_backend="chroma", chroma_path="./my_chroma_db", qdrant_host="localhost", qdrant_port=6333)

    # Create collections
    manager.create_collection("patents", backend="chroma")
    manager.create_collection("knowledge", backend="qdrant", dimension=768)
    manager.create_collection("axioms", backend="faiss", dimension=512)

    # List collections
    print("Collections:", manager.list_collections())

    # Add vectors
    vectors_patents = [[0.1, 0.2, 0.3], [0.4, 0.5, 0.6]]
    ids_patents = ["patent1", "patent2"]
    metadata_patents = [{"source": "USPTO"}, {"source": "EPO"}]
    manager.add_vectors("patents", vectors_patents, ids_patents, metadata_patents)

    vectors_knowledge = [[0.7, 0.8, 0.9], [1.0, 1.1, 1.2]]
    ids_knowledge = ["knowledge1", "knowledge2"]
    metadata_knowledge = [{"type": "concept"}, {"type": "entity"}]
    manager.add_vectors("knowledge", vectors_knowledge, ids_knowledge, metadata_knowledge, collection_name="knowledge")

    vectors_axioms = [[0.1, 0.2, 0.3, 0.4, 0.5],[0.6, 0.7, 0.8, 0.9, 1.0]]
    ids_axioms = ["axiom1", "axiom2"]
    metadata_axioms = [{"type": "rule"}, {"type": "fact"}]
    manager.add_vectors("axioms", vectors_axioms, ids_axioms, metadata_axioms, collection_name="axioms")

    # Search
    query_vector = [0.2, 0.3, 0.4]
    results = manager.search("patents", query_vector, k=1, filter={"source": "USPTO"})
    print("Search Results (patents):", results)

    query_vector_knowledge = [0.6, 0.7, 0.8]
    results_knowledge = manager.search("knowledge", query_vector_knowledge, k=1, filter={"type": "concept"})
    print("Search Results (knowledge):", results_knowledge)

    query_vector_axioms = [0.1, 0.2, 0.3, 0.4, 0.5]
    results_axioms = manager.search("axioms", query_vector_axioms, k=1, filter={"type": "rule"})
    print("Search Results (axioms):", results_axioms)

    # Backup and Restore (example - needs manual steps)
    manager.backup("patents")
    manager.backup("knowledge")
    manager.backup("axioms")

    # Delete collections
    manager.delete_collection("patents")
    manager.delete_collection("knowledge")
    manager.delete_collection("axioms")
    print("Collections after deletion:", manager.list_collections())
```

Key improvements and explanations:

* **Backend Abstraction:** The `VectorStoreManager` class encapsulates the logic for handling different vector database backends (ChromaDB, Qdrant, FAISS).  This makes the code more modular and easier to extend with new backends.
* **Lazy Initialization:**  The ChromaDB, Qdrant, and FAISS clients are initialized only when a collection using that backend is accessed.  This improves performance if not all backends are needed. The `initialized_backends` set keeps track of what's been initialized.
* **Collection Management:**  The `create_collection`, `delete_collection`, and `list_collections` methods provide a consistent interface for managing collections across different backends.  The `collections` dictionary maps collection names to their backend.
* **CRUD Operations:** The `add_vectors`, `get_vectors`, `update_vectors`, and `delete_vectors` methods provide a unified interface for performing CRUD operations on vectors, regardless of the backend.
* **Metadata Filtering:** The `search` method supports metadata filtering, allowing you to narrow down your search results based on specific criteria. The Qdrant filter conversion is now handled in a separate method.
* **Hybrid Search:** The `search` method includes a `query_text` parameter for hybrid search (vector + keyword).  This is currently only used by ChromaDB.
* **Batch Operations:**  The `add_vectors` and `delete_vectors` methods support batch operations, allowing you to add or delete multiple vectors at once.  This is more efficient than performing individual operations.
* **Persistence and Backup:** The code includes `backup` and `restore` methods for backing up and restoring collections.  Note that restoring requires manual steps, as it depends on the specific backend.  The FAISS backup saves both the index and an ID map for restoring the mapping between IDs and vector positions.
* **FAISS Integration:** The FAISS backend is implemented using the `faiss` library.  A simple `IndexFlatL2` index is used for demonstration purposes.  More sophisticated FAISS indexes can be used for improved performance.  The FAISS ID map is crucial for handling deletion and updates.  FAISS needs careful handling due to its in-memory nature.  The `_save_faiss_index` method allows persisting the index.  The delete functionality efficiently rebuilds the index rather than attempting in-place modifications, which FAISS doesn't support well.
* **Error Handling:** Includes basic error handling, such as checking for unsupported backends and missing collections.
* **Type Hints:** Type hints are used throughout the code to improve readability and maintainability.
* **Consistent Data Structure:** The `get_vectors` and `search` methods return a consistent data structure (a list of dictionaries) across all backends. This makes it easier to work with the results.
* **Qdrant Filtering:** The `_convert_filter_to_qdrant` method now converts a generic filter dictionary into a Qdrant `Filter` object. This allows you to use the same filter syntax for all backends.
* **FAISS ID Mapping:** The FAISS implementation now includes an ID mapping system to track the mapping between vector IDs and FAISS index positions. This is necessary for supporting deletion and updates.
* **Clearer Example Usage:** The example usage code is more comprehensive and demonstrates the key features of the `VectorStoreManager`.
* **NumPy Usage in FAISS:**  Explicitly converts vectors to NumPy arrays with the correct `float32` dtype for FAISS compatibility.
* **Backup Directory:** The `backup_dir` parameter allows you to specify the directory where backups should be stored.
* **Resetting index:** The faiss index is reset after deletion to ensure consistency.

This improved version addresses the key requirements and provides a more robust and flexible vector storage system.  Remember to install the necessary libraries: `pip install chromadb qdrant-client faiss-cpu numpy`.  You also need to have a Qdrant instance running if you use the "qdrant" backend.  For ChromaDB, Docker is optional but recommended for production deployments.  For FAISS, ensure you understand the implications of in-memory storage and data persistence.
