from sentence_transformers import SentenceTransformer
import faiss
import numpy as np

# Load your embedding model once
embedding_model = SentenceTransformer("all-MiniLM-L6-v2")

# Load FAISS index from disk (or create it on startup)
faiss_index = faiss.read_index("dr_faiss.index")

# Map of vector index to DR IDs
dr_id_map = {}  # Load from persistent storage


def vector_search(query: str, top_k=3) -> list:
    # Embed the query and look up the top_k nearest vectors in the index
    vec = embedding_model.encode([query], convert_to_numpy=True)
    D, I = faiss_index.search(vec, top_k)

    results = []
    for dist, idx in zip(D[0], I[0]):
        # FAISS pads missing results with index -1; those fail the
        # dr_id_map lookup and are skipped along with any unmapped rows
        dr_id = dr_id_map.get(idx)
        if dr_id:
            results.append({"id": dr_id, "distance": dist})
    return results
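
# --- Usage sketch (illustrative; not part of the original snippet) ---
# Assumes dr_id_map has already been loaded with {row position -> DR ID}
# pairs; the IDs and the query string below are hypothetical placeholders.
if __name__ == "__main__":
    dr_id_map.update({0: "DR-001", 1: "DR-002", 2: "DR-003"})
    for match in vector_search("Why did we choose FAISS for retrieval?"):
        print(f"{match['id']}: distance={match['distance']:.4f}")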