"""Build FAISS index from RAG chunks for retrieval."""

import json
import pickle
from pathlib import Path

import faiss
import numpy as np
from sentence_transformers import SentenceTransformer

CHUNKS_PATH = Path("dataset/data/chunks.jsonl")
INDEX_DIR = Path("index")
INDEX_DIR.mkdir(exist_ok=True)

print("Loading chunks...")
chunks = []
with open(CHUNKS_PATH) as f:
    for line in f:
        chunks.append(json.loads(line))
print(f"Loaded {len(chunks)} chunks")
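
# chunks.jsonl holds one JSON object per line; the fields used below are
# "text" (required) and "question" (optional). Each chunk is embedded as
# "Q: <question>\n<text>" so Q&A-style chunks carry their question.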
texts = []
for c in chunks:
    parts = []
    if c.get("question"):
        parts.append(f"Q: {c['question']}")
    parts.append(c["text"])
    texts.append("\n".join(parts))

print("Loading bge-m3...")
model = SentenceTransformer("./models/bge-m3")  # local copy of BAAI/bge-m3

print("Encoding chunks (this takes a few minutes)...")
embeddings = model.encode(
    texts,
    batch_size=256,
    show_progress_bar=True,
    normalize_embeddings=True,  # unit vectors: inner product == cosine similarity
)
embeddings = np.array(embeddings, dtype=np.float32)  # FAISS expects float32
print(f"Embeddings shape: {embeddings.shape}")
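
# Memory sanity check: float32 embeddings take n_chunks * dim * 4 bytes
# (bge-m3 dense vectors are 1024-dim, so roughly 4 KB per chunk).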

# Exact inner-product index; on normalized vectors this is cosine similarity
dim = embeddings.shape[1]
index = faiss.IndexFlatIP(dim)
index.add(embeddings)
print(f"FAISS index built: {index.ntotal} vectors")
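
# NOTE: a flat index does exact search, which is fine at this scale. For a much
# larger corpus an approximate index could be swapped in; untested sketch, with
# nlist as an assumed tuning value (not from this script):
#   nlist = 1024
#   quantizer = faiss.IndexFlatIP(dim)
#   index = faiss.IndexIVFFlat(quantizer, dim, nlist, faiss.METRIC_INNER_PRODUCT)
#   index.train(embeddings)  # IVF needs a training pass before add()
#   index.add(embeddings)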

# Persist the index plus the chunk metadata needed to map hits back to text
faiss.write_index(index, str(INDEX_DIR / "chunks.faiss"))
with open(INDEX_DIR / "chunks_meta.pkl", "wb") as f:
    pickle.dump(chunks, f)

# Bundle the lexicon with the other retrieval artifacts
lexicon = []
with open("dataset/data/lexicon.jsonl") as f:
    for line in f:
        lexicon.append(json.loads(line))
with open(INDEX_DIR / "lexicon.pkl", "wb") as f:
    pickle.dump(lexicon, f)

index_path = INDEX_DIR / "chunks.faiss"
print(f"Saved index to {INDEX_DIR}/")
print(f"Index size: {index_path.stat().st_size / 1e6:.1f} MB")