#!/usr/bin/env python3
"""
Dense-only retrieval server for Search training.
Provides E5 embeddings + FAISS dense indexing.
This code is adapted from the RLLM project:
https://github.com/rllm-org/rllm
License: Apache-2.0
Usage:
python server.py --data_dir ./search_data/prebuilt_indices --port 8000
"""
import argparse
import json
import threading
from pathlib import Path
from typing import Any
import torch
import faiss
from flask import Flask, jsonify, request
from sentence_transformers import SentenceTransformer
class LocalRetriever:
"""Dense-only retrieval system using FAISS."""
def __init__(self, data_dir: str, device: str = "cpu", gpu_memory_limit_mb: int = 6144):
self.data_dir = Path(data_dir)
self.corpus = []
self.dense_index = None
self._lock = threading.Lock()
if device.startswith("cuda"):
dev_idx = int(device.split(":")[-1]) if ":" in device else 0
fraction = gpu_memory_limit_mb / (torch.cuda.get_device_properties(dev_idx).total_memory / 1024**2)
fraction = min(fraction, 1.0)
torch.cuda.set_per_process_memory_fraction(fraction, dev_idx)
print(f"GPU memory limit: {gpu_memory_limit_mb}MB (fraction={fraction:.4f}) on cuda:{dev_idx}")
self.encoder = SentenceTransformer("intfloat/e5-base-v2", device=device)
print(f"E5 encoder loaded on device: {device}")
self._load_data()
def _load_data(self):
"""Load corpus and dense index from data directory."""
print(f"Loading data from {self.data_dir}")
# Load corpus
corpus_file = self.data_dir / "corpus.json"
with open(corpus_file) as f:
self.corpus = json.load(f)
print(f"Loaded corpus with {len(self.corpus)} documents")
# Load dense index
dense_index_file = self.data_dir / "e5_Flat.index"
self.dense_index = faiss.read_index(str(dense_index_file))
print(f"Loaded dense index with {self.dense_index.ntotal} vectors")
def search(self, query: str, k: int = 10) -> list[dict[str, Any]]:
"""Dense retrieval using FAISS. Lock serializes GPU encoding to prevent memory bloat."""
with self._lock:
query_vector = self.encoder.encode([f"query: {query}"]).astype("float32")
scores, indices = self.dense_index.search(query_vector, k)
return [{"content": self.corpus[idx], "score": float(score)} for score, idx in zip(scores[0], indices[0], strict=False) if idx < len(self.corpus)]
# Flask app: module-level singletons shared by the route handlers below.
app = Flask(__name__)
# Populated by main() before the server starts; route handlers read it.
retriever = None
@app.route("/health", methods=["GET"])
def health_check():
    """Report server liveness plus basic corpus/index statistics."""
    payload = {
        "status": "healthy",
        "corpus_size": len(retriever.corpus),
        "index_type": "dense_only",
        "index_loaded": retriever.dense_index is not None,
    }
    return jsonify(payload)
@app.route("/retrieve", methods=["POST"])
def retrieve():
    """Main retrieval endpoint.

    Expects JSON ``{"query": str, "top_k": int}`` ("k" accepted as a legacy
    alias; default 10). Returns the query, method, formatted results, and
    result count, or a JSON error with 400/500 status.
    """
    try:
        data = request.get_json()
        if not data or "query" not in data:
            return jsonify({"error": "Missing 'query' in request"}), 400
        query = data["query"]
        # Accept "top_k" or legacy "k". Validate it here so a bad client
        # value yields a 400 instead of surfacing as a 500 from search().
        raw_k = data.get("top_k", data.get("k", 10))
        try:
            k = int(raw_k)
        except (TypeError, ValueError):
            return jsonify({"error": "'top_k' must be an integer"}), 400
        if k < 1:
            return jsonify({"error": "'top_k' must be >= 1"}), 400
        results = retriever.search(query=query, k=k)
        # 1-based doc ids so ranks read naturally in client logs.
        formatted_results = [
            {"id": f"doc_{i}", "content": result["content"], "score": result["score"]}
            for i, result in enumerate(results, 1)
        ]
        return jsonify({
            "query": query,
            "method": "dense",
            "results": formatted_results,
            "num_results": len(formatted_results),
        })
    except Exception as e:
        # Boundary handler: convert any unexpected failure into a JSON 500.
        return jsonify({"error": str(e)}), 500
def main():
    """Parse CLI args, build the global retriever, and run the Flask server.

    Exits with status 1 if the retriever fails to initialize (missing data
    files, bad device, etc.) so supervisors can detect the failed startup.
    """
    parser = argparse.ArgumentParser(description="Dense-only retrieval server")
    parser.add_argument("--data_dir", default="./search_data/prebuilt_indices", help="Directory containing corpus and dense index")
    parser.add_argument("--host", default="127.0.0.1", help="Host to bind to")
    parser.add_argument("--port", type=int, default=8000, help="Port to bind to")
    parser.add_argument("--debug", action="store_true", help="Enable debug mode")
    parser.add_argument("--device", default="cpu", help="Device for E5 encoder (cpu, cuda, cuda:0, etc.)")
    # NOTE(review): this CLI default (1024) differs from LocalRetriever's
    # __init__ default (6144); kept as-is to preserve behavior.
    parser.add_argument("--gpu_memory_limit_mb", type=int, default=1024, help="Max GPU memory in MB for E5 encoder (default: 1024)")
    args = parser.parse_args()

    # Initialize retriever
    global retriever
    try:
        retriever = LocalRetriever(args.data_dir, device=args.device, gpu_memory_limit_mb=args.gpu_memory_limit_mb)
        print(f"Dense retrieval server initialized with {len(retriever.corpus)} documents")
    except Exception as e:
        print(f"Failed to initialize retriever: {e}")
        # BUG FIX: the previous plain `return` exited with status 0, hiding
        # the failure from supervisors and shell scripts.
        raise SystemExit(1) from e

    # Start server; threaded=True lets Flask serve concurrent requests while
    # LocalRetriever._lock serializes the GPU encoding step.
    print(f"Starting dense retrieval server on {args.host}:{args.port}")
    app.run(host=args.host, port=args.port, debug=args.debug, threaded=True)


if __name__ == "__main__":
    main()
|