AgamP committed on
Commit
30d16ab
·
verified ·
1 Parent(s): be305b9

Update agent/server.py

Browse files
Files changed (1) hide show
  1. agent/server.py +3 -3
agent/server.py CHANGED
@@ -38,6 +38,8 @@ from tools.rerank_tool import rerank_candidates
38
  from tools.constraints_tool import apply_constraints
39
 
40
 
 
 
41
  class ChatRequest(BaseModel):
42
  query: str
43
  clarification_answer: Optional[str] = None
@@ -178,9 +180,7 @@ def _format_test_types(meta: dict) -> list[str]:
178
 
179
 
180
  def _run_pipeline(query: str, topn: int = 200, verbose: bool = False, llm_model: Optional[str] = None):
181
- if verbose:
182
- # For debugging, bypass cached resources to ensure fresh state
183
- load_resources.cache_clear()
184
  df_catalog, bm25, vec, reranker, lookup, vocab, llm_extractor, catalog_by_id = load_resources(llm_model_override=llm_model)
185
  plan = _build_plan_with_fallback(query, vocab=vocab, llm_extractor=llm_extractor)
186
  cand_set = retrieve_candidates(plan, bm25, vec, topn=topn, catalog_df=df_catalog)
 
38
  from tools.constraints_tool import apply_constraints
39
 
40
 
41
+ os.environ.setdefault("HF_HOME", "/home/user/.cache/huggingface")
42
+
43
  class ChatRequest(BaseModel):
44
  query: str
45
  clarification_answer: Optional[str] = None
 
180
 
181
 
182
  def _run_pipeline(query: str, topn: int = 200, verbose: bool = False, llm_model: Optional[str] = None):
183
+
 
 
184
  df_catalog, bm25, vec, reranker, lookup, vocab, llm_extractor, catalog_by_id = load_resources(llm_model_override=llm_model)
185
  plan = _build_plan_with_fallback(query, vocab=vocab, llm_extractor=llm_extractor)
186
  cand_set = retrieve_candidates(plan, bm25, vec, topn=topn, catalog_df=df_catalog)