Antoine101 committed on
Commit
2fab9e1
·
verified ·
1 Parent(s): 741e44b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -12
app.py CHANGED
@@ -17,17 +17,6 @@ from langchain_huggingface.llms import HuggingFacePipeline
17
  from langchain_ollama import ChatOllama
18
  from langchain_google_genai import ChatGoogleGenerativeAI
19
  from langchain_core.rate_limiters import InMemoryRateLimiter
20
- from smolagents import (
21
- InferenceClientModel, LiteLLMModel, OpenAIServerModel, TransformersModel,
22
- CodeAgent,
23
- DuckDuckGoSearchTool,
24
- HfApiModel,
25
- LiteLLMModel,
26
- OpenAIServerModel,
27
- PythonInterpreterTool,
28
- tool,
29
- InferenceClientModel, ToolCallingAgent
30
- )
31
 
32
 
33
  # (Keep Constants as is)
@@ -117,9 +106,11 @@ class BasicAgent:
117
  SystemMessage(content=system_prompt),
118
  HumanMessage(content=question)]
119
  response = self.graph.invoke({"messages": messages})
120
- print(f"RESPONSE {response}")
121
  response = response['messages'][-1].content
122
  print(f"Agent returning answer: {response}")
 
 
 
123
  return response
124
 
125
 
 
17
  from langchain_ollama import ChatOllama
18
  from langchain_google_genai import ChatGoogleGenerativeAI
19
  from langchain_core.rate_limiters import InMemoryRateLimiter
 
 
 
 
 
 
 
 
 
 
 
20
 
21
 
22
  # (Keep Constants as is)
 
106
  SystemMessage(content=system_prompt),
107
  HumanMessage(content=question)]
108
  response = self.graph.invoke({"messages": messages})
 
109
  response = response['messages'][-1].content
110
  print(f"Agent returning answer: {response}")
111
+ final_answer_idx = response.find("FINAL ANSWER: ")
112
+ if final_answer_idx != -1:
113
+ return response[final_answer_idx + len("FINAL ANSWER: "):]
114
  return response
115
 
116