mmichiels13 committed on
Commit
bd6c5ad
·
verified ·
1 Parent(s): 9c91bee

Add lru_cache for last 20 questions

Browse files
Files changed (1) hide show
  1. app.py +7 -0
app.py CHANGED
@@ -9,6 +9,7 @@ from huggingface_hub import login
9
  from smolagents import CodeAgent, InferenceClientModel
10
  from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
11
  from retriever import load_guest_dataset
 
12
 
13
  # (Keep Constants as is)
14
  # --- Constants ---
@@ -38,7 +39,13 @@ class BasicAgent:
38
  model=self.model,
39
  max_steps=5 # Limit reasoning steps
40
  )
 
41
  def __call__(self, question: str) -> str:
 
 
 
 
 
42
  print(f"Agent received question (first 50 chars): {question[:50]}...")
43
  #fixed_answer = "This is a default answer."
44
  #print(f"Agent returning fixed answer: {fixed_answer}")
 
9
  from smolagents import CodeAgent, InferenceClientModel
10
  from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
11
  from retriever import load_guest_dataset
12
+ from functools import lru_cache
13
 
14
  # (Keep Constants as is)
15
  # --- Constants ---
 
39
  model=self.model,
40
  max_steps=5 # Limit reasoning steps
41
  )
42
+
43
  def __call__(self, question: str) -> str:
44
+ # Delegates to the lru_cache-wrapped runner. NOTE(review): lru_cache on an instance method keys on `self` and keeps the agent instance alive for the cache's lifetime (ruff B019) — a per-instance dict cache or functools.cached_property-style approach would be safer; confirm agents are long-lived singletons before shipping.
45
+ return self._run_with_cache(question)
46
+
47
+ @lru_cache(maxsize=20)
48
+ def _run_with_cache(self, question: str) -> str:
49
  print(f"Agent received question (first 50 chars): {question[:50]}...")
50
  #fixed_answer = "This is a default answer."
51
  #print(f"Agent returning fixed answer: {fixed_answer}")