Isaacgv committed on
Commit
96e630a
·
verified ·
1 Parent(s): 39189cc

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +3 -33
agent.py CHANGED
@@ -112,22 +112,7 @@ def arvix_search(query: str) -> str:
112
 
113
 
114
 
115
- system_prompt= '''
116
- You are a helpful assistant with access to tools for answering questions. For each question, think step by step and clearly explain your reasoning. Conclude your response using the following format:
117
 
118
- FINAL ANSWER: [YOUR FINAL ANSWER]
119
-
120
- Formatting rules for YOUR FINAL ANSWER:
121
-
122
- If the answer is a number, write it as digits only. Do not include commas or units (e.g. $, %, etc.) unless explicitly required.
123
-
124
- If the answer is a string, avoid articles ("a", "an", "the"), do not use abbreviations (e.g., for cities), and spell out all digits (e.g., "two thousand").
125
-
126
- If the answer is a comma-separated list, apply the above rules to each item in the list, depending on whether they are numbers or strings.
127
-
128
- Your answer should only begin with "FINAL ANSWER: " followed directly by the formatted result. Nothing else should follow.
129
- '''
130
- sys_msg = SystemMessage(content=system_prompt)
131
 
132
  tools = [
133
  multiply,
@@ -151,17 +136,7 @@ def build_graph(provider: str = "huggingface"):
151
  # Groq https://console.groq.com/docs/models
152
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
153
  elif provider == "huggingface":
154
- llm = ChatHuggingFace(
155
- llm=HuggingFaceEndpoint(
156
- repo_id="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
157
- task="text-generation", # for chat‐style use “text-generation”
158
- max_new_tokens=1024,
159
- do_sample=False,
160
- repetition_penalty=1.03,
161
- temperature=0,
162
- ),
163
- verbose=True,
164
- )
165
  else:
166
  raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
167
  # Bind tools to LLM
@@ -172,19 +147,14 @@ def build_graph(provider: str = "huggingface"):
172
  """Assistant node"""
173
  return {"messages": [llm_with_tools.invoke(state["messages"])]}
174
 
175
- def retriever(state: MessagesState):
176
- """Retriever node"""
177
-
178
- return {"messages": [sys_msg] }
179
 
180
 
181
 
182
  builder = StateGraph(MessagesState)
183
- builder.add_node("retriever", retriever)
184
  builder.add_node("assistant", assistant)
185
  builder.add_node("tools", ToolNode(tools))
186
- builder.add_edge(START, "retriever")
187
- builder.add_edge("retriever", "assistant")
188
  builder.add_conditional_edges(
189
  "assistant",
190
  tools_condition,
 
112
 
113
 
114
 
 
 
115
 
 
 
 
 
 
 
 
 
 
 
 
 
 
116
 
117
  tools = [
118
  multiply,
 
136
  # Groq https://console.groq.com/docs/models
137
  llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
138
  elif provider == "huggingface":
139
+ llm = ChatAnthropic(model='claude-3-7-sonnet-20250219', temperature=0.0)
 
 
 
 
 
 
 
 
 
 
140
  else:
141
  raise ValueError("Invalid provider. Choose 'google', 'groq' or 'huggingface'.")
142
  # Bind tools to LLM
 
147
  """Assistant node"""
148
  return {"messages": [llm_with_tools.invoke(state["messages"])]}
149
 
150
+
 
 
 
151
 
152
 
153
 
154
  builder = StateGraph(MessagesState)
 
155
  builder.add_node("assistant", assistant)
156
  builder.add_node("tools", ToolNode(tools))
157
+ builder.add_edge(START, "assistant")
 
158
  builder.add_conditional_edges(
159
  "assistant",
160
  tools_condition,