Spaces:
Running
Running
File size: 10,782 Bytes
0a4529c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 |
# DEPENDENCIES
import time
import faiss
import numpy as np
from typing import List
from pathlib import Path
from typing import Optional
from config.models import DocumentChunk
from config.settings import get_settings
from config.logging_config import get_logger
from utils.error_handler import handle_errors
from utils.error_handler import IndexingError
from vector_store.bm25_index import BM25Index
from vector_store.faiss_manager import FAISSManager
from vector_store.metadata_store import MetadataStore
# Setup Settings and Logging
settings = get_settings()
logger = get_logger(__name__)
class IndexBuilder:
    """
    Main index builder orchestrator: Builds and manages both vector and keyword indexes
    Coordinates FAISS vector index, BM25 keyword index, and metadata storage
    """
    def __init__(self, vector_store_dir: Optional[Path] = None):
        """
        Initialize index builder
        Arguments:
        ----------
        vector_store_dir { Path } : Directory for index storage; falls back to
                                    settings.VECTOR_STORE_DIR when omitted
        """
        self.logger = logger
        self.vector_store_dir = Path(vector_store_dir or settings.VECTOR_STORE_DIR)
        # Initialize component managers: vector (FAISS), keyword (BM25), and metadata backends
        self.faiss_manager = FAISSManager(vector_store_dir = self.vector_store_dir)
        self.bm25_index = BM25Index()
        self.metadata_store = MetadataStore()
        # Index statistics
        self.total_chunks_indexed = 0                  # cumulative across all build_indexes() calls
        self.last_build_time: Optional[float] = None   # epoch seconds of the most recent build
        self.logger.info(f"Initialized IndexBuilder: store_dir={self.vector_store_dir}")
    @handle_errors(error_type = IndexingError, log_error = True, reraise = True)
    def build_indexes(self, chunks: List[DocumentChunk], rebuild: bool = False) -> dict:
        """
        Build both vector and keyword indexes from document chunks
        Arguments:
        ----------
        chunks { list } : List of DocumentChunk objects with embeddings
        rebuild { bool } : Whether to rebuild existing indexes
        Returns:
        --------
        { dict } : Build statistics
        Raises:
        -------
        IndexingError : If no chunks are provided, or none carry an embedding
        """
        if not chunks:
            raise IndexingError("No chunks provided for indexing")
        # Chunks without an embedding cannot be vector-indexed; warn and drop them
        chunks_with_embeddings = [c for c in chunks if (c.embedding is not None)]
        if (len(chunks_with_embeddings) != len(chunks)):
            self.logger.warning(f"{len(chunks) - len(chunks_with_embeddings)} chunks missing embeddings")
        if not chunks_with_embeddings:
            raise IndexingError("No chunks with embeddings found")
        self.logger.info(f"Building indexes for {len(chunks_with_embeddings)} chunks (rebuild={rebuild})")
        start_time = time.time()
        # Extract parallel arrays consumed by the three backends below
        embeddings = self._extract_embeddings(chunks = chunks_with_embeddings)
        texts = [chunk.text for chunk in chunks_with_embeddings]
        chunk_ids = [chunk.chunk_id for chunk in chunks_with_embeddings]
        # Build vector index (FAISS)
        self.logger.info("Building FAISS vector index...")
        faiss_stats = self.faiss_manager.build_index(embeddings = embeddings,
                                                     chunk_ids = chunk_ids,
                                                     rebuild = rebuild,
                                                     )
        # Build keyword index (BM25)
        self.logger.info("Building BM25 keyword index...")
        bm25_stats = self.bm25_index.build_index(texts = texts,
                                                 chunk_ids = chunk_ids,
                                                 rebuild = rebuild,
                                                 )
        # Store metadata
        self.logger.info("Storing chunk metadata...")
        metadata_stats = self.metadata_store.store_chunks(chunks = chunks_with_embeddings,
                                                          rebuild = rebuild,
                                                          )
        # Update statistics
        self.total_chunks_indexed += len(chunks_with_embeddings)
        self.last_build_time = time.time()
        build_time = time.time() - start_time
        stats = {"total_chunks"       : len(chunks_with_embeddings),
                 "build_time_seconds" : build_time,
                 # Guard against division by zero on sub-resolution timings
                 "chunks_per_second"  : len(chunks_with_embeddings) / build_time if build_time > 0 else 0,
                 "faiss"              : faiss_stats,
                 "bm25"               : bm25_stats,
                 "metadata"           : metadata_stats,
                 "vector_dimension"   : embeddings.shape[1] if (len(embeddings) > 0) else 0,
                 }
        self.logger.info(f"Index building completed: {len(chunks_with_embeddings)} chunks in {build_time:.2f}s")
        self.logger.info(f"FAISS index: {faiss_stats.get('vectors', 0)} vectors")
        self.logger.info(f"BM25 index: {bm25_stats.get('documents', 0)} documents")
        self.logger.info(f"Metadata: {metadata_stats.get('stored_chunks', 0)} chunks stored")
        return stats
    def _extract_embeddings(self, chunks: List[DocumentChunk]) -> np.ndarray:
        """
        Extract embeddings from chunks as a float32 numpy matrix
        Arguments:
        ----------
        chunks { list } : List of DocumentChunk objects
        Returns:
        --------
        { np.ndarray } : Embeddings matrix (float32, one row per chunk)
        Raises:
        -------
        IndexingError : If no chunk carries an embedding
        """
        embeddings = [chunk.embedding for chunk in chunks if chunk.embedding is not None]
        if not embeddings:
            raise IndexingError("No embeddings found in chunks")
        # FAISS requires float32 input
        return np.array(embeddings).astype('float32')
    def get_index_stats(self) -> dict:
        """
        Get comprehensive index statistics
        Returns:
        --------
        { dict } : Index statistics
        """
        # NOTE(fix): removed a dead call to an undefined `get_vector_search()` —
        # the name was never imported in this module, so the call always raised
        # NameError (swallowed by a broad except) and its result was never used.
        faiss_stats = self.faiss_manager.get_index_stats()
        bm25_stats = self.bm25_index.get_index_stats()
        metadata_stats = self.metadata_store.get_stats()
        stats = {"total_chunks_indexed" : self.total_chunks_indexed,
                 "last_build_time"      : self.last_build_time,
                 "faiss"                : faiss_stats,
                 "bm25"                 : bm25_stats,
                 "metadata"             : metadata_stats,
                 "index_directory"      : str(self.vector_store_dir),
                 }
        return stats
    def is_index_built(self) -> bool:
        """
        Check if indexes are built and ready
        Returns:
        --------
        { bool } : True only if all three backends (FAISS, BM25, metadata) are ready
        """
        faiss_ready = self.faiss_manager.is_index_built()
        bm25_ready = self.bm25_index.is_index_built()
        metadata_ready = self.metadata_store.is_ready()
        return faiss_ready and bm25_ready and metadata_ready
    def optimize_indexes(self) -> dict:
        """
        Optimize indexes for better performance
        Returns:
        --------
        { dict } : Optimization results per backend
        """
        self.logger.info("Optimizing indexes")
        faiss_optimization = self.faiss_manager.optimize_index()
        bm25_optimization = self.bm25_index.optimize_index()
        optimization_stats = {"faiss"   : faiss_optimization,
                              "bm25"    : bm25_optimization,
                              "message" : "Index optimization completed",
                              }
        return optimization_stats
    def clear_indexes(self):
        """
        Clear all indexes and reset the cumulative chunk counter
        """
        self.logger.warning("Clearing all indexes")
        self.faiss_manager.clear_index()
        self.bm25_index.clear_index()
        self.metadata_store.clear()
        self.total_chunks_indexed = 0
    def get_index_size(self) -> dict:
        """
        Get index sizes in memory and disk
        Returns:
        --------
        { dict } : Size information (totals plus per-backend breakdown, in MB)
        """
        faiss_size = self.faiss_manager.get_index_size()
        bm25_size = self.bm25_index.get_index_size()
        metadata_size = self.metadata_store.get_size()
        total_memory = (faiss_size.get("memory_mb", 0) + bm25_size.get("memory_mb", 0) + metadata_size.get("memory_mb", 0))
        total_disk = (faiss_size.get("disk_mb", 0) + bm25_size.get("disk_mb", 0) + metadata_size.get("disk_mb", 0))
        return {"total_memory_mb" : total_memory,
                "total_disk_mb"   : total_disk,
                "faiss"           : faiss_size,
                "bm25"            : bm25_size,
                "metadata"        : metadata_size,
                }
# Module-level singleton, created lazily on first access
_index_builder = None
def get_index_builder(vector_store_dir: Optional[Path] = None) -> IndexBuilder:
    """
    Get global index builder instance
    Arguments:
    ----------
    vector_store_dir { Path } : Vector store directory; only honored on the
                                first call, when the singleton is created
    Returns:
    --------
    { IndexBuilder } : Shared IndexBuilder instance
    """
    global _index_builder
    if _index_builder is not None:
        return _index_builder
    _index_builder = IndexBuilder(vector_store_dir)
    return _index_builder
def build_indexes(chunks: List[DocumentChunk], **kwargs) -> dict:
    """
    Convenience function to build indexes via the shared IndexBuilder singleton
    Arguments:
    ----------
    chunks { list } : List of DocumentChunk objects
    **kwargs : Forwarded to IndexBuilder.build_indexes (e.g. rebuild=True)
    Returns:
    --------
    { dict } : Build statistics
    """
    # NOTE(fix): removed a stray trailing "|" character that made the final
    # line of the module a syntax error.
    builder = get_index_builder()
    return builder.build_indexes(chunks, **kwargs)