Hugging Face Space (status: Sleeping)

Commit: "fixes" — Browse files
Changed file: src/search_final.py (+10 −4)
Diff of src/search_final.py (reconstructed as a unified diff from the garbled
side-by-side rendering; removed-line text that the page extraction truncated is
marked "[not captured]" rather than guessed):

@@ -33,7 +33,9 @@ os.environ["TOKENIZERS_PARALLELISM"] = "false"
 # ---------------- Paths & Models ----------------
 EMBED_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
 CROSS_ENCODER = "cross-encoder/ms-marco-MiniLM-L-6-v2"
-[not captured: old OUT_DIR assignment removed by this commit]
+# Get the directory where this script is located
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+OUT_DIR = os.path.join(SCRIPT_DIR, "data", "index_merged")

 FAISS_PATH = os.path.join(OUT_DIR, "faiss_merged.index")
 BM25_PATH = os.path.join(OUT_DIR, "bm25_merged.pkl")

@@ -67,9 +69,10 @@ try:
     reranker = CrossEncoder(CROSS_ENCODER)
     api_key = os.getenv("HF_API_KEY")
     if not api_key:
-        logger.[not captured: rest of removed line truncated in extraction]
-        [not captured]
-        [not captured]
+        logger.warning("HF_API_KEY environment variable not set. Mistral API features will not be available.")
+        client = None  # Set client to None when API key is not available
+    else:
+        client = OpenAI(
             base_url="https://router.huggingface.co/v1",
             api_key=api_key
         )

@@ -86,6 +89,9 @@ def get_mistral_answer(query: str, context: str) -> str:
     Calls Mistral 7B Instruct API via Hugging Face Inference API.
     Adds error handling and logging.
     """
+    if client is None:
+        return "Mistral API is not available. Please set HF_API_KEY environment variable to use AI-powered responses."
+
     prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer in full sentences using context."
     try:
         logger.info(f"Calling Mistral API for query: {query}")
Resulting file (excerpt) — src/search_final.py after the change, with the
diff's new-side line numbers (indentation of the hunk-2 and hunk-3 bodies is
inferred from their `try:` / `def get_mistral_answer(...)` hunk contexts):

 33  # ---------------- Paths & Models ----------------
 34  EMBED_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
 35  CROSS_ENCODER = "cross-encoder/ms-marco-MiniLM-L-6-v2"
 36  # Get the directory where this script is located
 37  SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
 38  OUT_DIR = os.path.join(SCRIPT_DIR, "data", "index_merged")
 39
 40  FAISS_PATH = os.path.join(OUT_DIR, "faiss_merged.index")
 41  BM25_PATH = os.path.join(OUT_DIR, "bm25_merged.pkl")

 69      reranker = CrossEncoder(CROSS_ENCODER)
 70      api_key = os.getenv("HF_API_KEY")
 71      if not api_key:
 72          logger.warning("HF_API_KEY environment variable not set. Mistral API features will not be available.")
 73          client = None  # Set client to None when API key is not available
 74      else:
 75          client = OpenAI(
 76              base_url="https://router.huggingface.co/v1",
 77              api_key=api_key
 78          )

 89      Calls Mistral 7B Instruct API via Hugging Face Inference API.
 90      Adds error handling and logging.
 91      """
 92      if client is None:
 93          return "Mistral API is not available. Please set HF_API_KEY environment variable to use AI-powered responses."
 94
 95      prompt = f"Context:\n{context}\n\nQuestion: {query}\nAnswer in full sentences using context."
 96      try:
 97          logger.info(f"Calling Mistral API for query: {query}")