Solobrad committed on
Commit
90f0e8c
·
verified ·
1 Parent(s): a386d51

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +63 -171
agent.py CHANGED
@@ -1,224 +1,116 @@
 
 
1
  from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
2
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
3
  from langchain_community.document_loaders import WikipediaLoader
4
  from llama_index.core.tools.types import ToolMetadata
5
  from llama_index.core.schema import Document
6
  from llama_index.core.tools import FunctionTool
7
- from langchain_core.tools import tool
8
  from langchain_community.tools.tavily_search import TavilySearchResults
9
  from llama_index.core.agent.workflow import AgentWorkflow
10
- import os
11
- import requests
12
 
13
  hf_token = os.getenv("HF_TOKEN")
14
 
15
- # List of models to try in order
16
- model_list = [
17
- "TinyLlama/TinyLlama-1.1B-Chat-v1.0", # Your current working model
18
- "microsoft/phi-3-mini-128k-instruct", # You mentioned this works
19
- "google/gemma-2b-it", # Backup option
20
- "gpt2" # Final fallback
21
  ]
22
 
23
- # Try to create an LLM with the first model
24
  current_model_index = 0
25
  llm = HuggingFaceInferenceAPI(
26
  model_name=model_list[current_model_index],
27
- token=os.getenv("HF_TOKEN"),
28
  )
29
 
30
- @tool
31
  def multiply(a: int, b: int) -> int:
32
- """Multiply two numbers.
33
- Args:
34
- a: first int
35
- b: second int
36
- """
37
  return a * b
38
 
39
- @tool
 
40
  def add(a: int, b: int) -> int:
41
- """Add two numbers.
42
-
43
- Args:
44
- a: first int
45
- b: second int
46
- """
47
  return a + b
48
 
49
- @tool
50
  def subtract(a: int, b: int) -> int:
51
- """Subtract two numbers.
52
-
53
- Args:
54
- a: first int
55
- b: second int
56
- """
57
  return a - b
58
 
59
- @tool
60
- def divide(a: int, b: int) -> float: # Changed to float for accurate division
61
- """Divide two numbers.
62
-
63
- Args:
64
- a: first int
65
- b: second int
66
- """
67
  if b == 0:
68
  raise ValueError("Cannot divide by zero.")
69
  return a / b
70
 
71
- @tool
72
  def modulus(a: int, b: int) -> int:
73
- """Get the modulus of two numbers.
74
-
75
- Args:
76
- a: first int
77
- b: second int
78
- """
79
  return a % b
80
-
81
- @tool
82
  def web_search(query: str) -> list:
83
- """Search Tavily for a query and return a maximum 3 results as LlamaIndex Documents.
84
- Args:
85
- query: The search query.
86
- """
87
- search_docs = TavilySearchResults(max_results=3).invoke(query=query)
88
-
89
- llama_docs = []
90
- for doc in search_docs:
91
- source = doc.metadata.get("source", "")
92
- page = doc.metadata.get("page", "")
93
- content = doc.page_content
94
- full_metadata = {
95
- "source": source,
96
- "page": page
97
- }
98
- llama_docs.append(Document(text=content, metadata=full_metadata))
99
-
100
- return llama_docs
101
-
102
- @tool
103
  def wiki_search(query: str) -> list:
104
- """Search Wikipedia for a query and return maximum 2 results as LlamaIndex Documents.
105
- Args:
106
- query: The search query.
107
- """
108
- # Perform Wikipedia search using WikipediaLoader
109
- search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
110
-
111
- # Create a list of LlamaIndex Document objects
112
- llama_docs = []
113
- for doc in search_docs:
114
- source = doc.metadata.get("source", "")
115
- page = doc.metadata.get("page", "")
116
- content = doc.page_content
117
- full_metadata = {
118
- "source": source,
119
- "page": page
120
- }
121
- llama_docs.append(Document(text=content, metadata=full_metadata))
122
-
123
- return llama_docs
124
-
125
-
126
- web_search = FunctionTool(
127
  web_search,
128
  metadata=ToolMetadata(name="web_search", description="Tavily 3-hit search")
129
  )
130
-
131
- wiki_search = FunctionTool(
132
  wiki_search,
133
  metadata=ToolMetadata(name="wiki_search", description="Wikipedia 2-hit search")
134
  )
135
 
136
- multiply_tool = FunctionTool(
137
- multiply,
138
- metadata=ToolMetadata(name="multiply", description="Multiply two numbers.")
139
- )
140
- add_tool = FunctionTool(
141
- add,
142
- metadata=ToolMetadata(name="add", description="Add two numbers.")
143
- )
144
- subtract_tool = FunctionTool(
145
- subtract,
146
- metadata=ToolMetadata(name="subtract", description="Subtract two numbers.")
147
- )
148
- divide_tool = FunctionTool(
149
- divide,
150
- metadata=ToolMetadata(name="divide", description="Divide two numbers.")
151
- )
152
- modulus_tool = FunctionTool(
153
- modulus,
154
- metadata=ToolMetadata(name="modulus", description="Modulus operation on two numbers.")
155
- )
156
 
157
- # Then include all tools in the list
158
  tools = [
159
- web_search,
160
- wiki_search,
161
  multiply_tool,
162
  add_tool,
163
  subtract_tool,
164
  divide_tool,
165
- modulus_tool
166
  ]
167
 
168
- # Create the agent with the first model
169
  agent = AgentWorkflow.from_tools_or_functions(tools, llm=llm)
170
 
171
- # Simple function to try a different model when needed
172
- def try_next_model():
 
 
173
  global current_model_index, llm, agent
174
-
175
- # Move to the next model if available
176
- if current_model_index < len(model_list) - 1:
177
- current_model_index += 1
178
- next_model = model_list[current_model_index]
179
- print(f"Switching to backup model: {next_model}")
180
-
181
- # Create a new LLM with the next model
182
- llm = HuggingFaceInferenceAPI(
183
- model_name=next_model,
184
- token=os.getenv("HF_TOKEN"),
185
- )
186
-
187
- # Create a new agent with the new model
188
- agent = AgentWorkflow.from_tools_or_functions(tools, llm=llm)
189
- return True
190
- else:
191
- print("No more models to try!")
192
- return False
193
-
194
- # Function to run the agent with fallback
195
- def run_with_fallback(query):
196
- global agent
197
-
198
- # Try to run the query with the current model
199
- try:
200
- return agent.run(query)
201
- except Exception as e:
202
- print(f"Error running agent: {e}")
203
-
204
- # Try next model
205
- if try_next_model():
206
- # Try the query with the new model
207
- try:
208
- return agent.run(query)
209
- except Exception as e2:
210
- print(f"Error with fallback model: {e2}")
211
-
212
- # One more attempt with another model
213
- if try_next_model():
214
- try:
215
- return agent.run(query)
216
- except Exception as e3:
217
- print(f"Error with second fallback model: {e3}")
218
-
219
- # If all attempts fail, return an error message
220
- return "Sorry, I encountered technical issues with all available models."
221
-
222
- # Usage example:
223
- # result = run_with_fallback("What is 42 * 5?")
224
- # print(result)
 
1
+ import os
2
+ import requests
3
  from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
4
  from llama_index.embeddings.huggingface import HuggingFaceEmbedding
5
  from langchain_community.document_loaders import WikipediaLoader
6
  from llama_index.core.tools.types import ToolMetadata
7
  from llama_index.core.schema import Document
8
  from llama_index.core.tools import FunctionTool
 
9
  from langchain_community.tools.tavily_search import TavilySearchResults
10
  from llama_index.core.agent.workflow import AgentWorkflow
 
 
11
 
12
hf_token = os.getenv("HF_TOKEN")

# Candidate models, ordered by preference; later entries serve as fallbacks.
model_list = [
    "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
    "microsoft/phi-3-mini-128k-instruct",
    "google/gemma-2b-it",
    "gpt2",
]

# Index into model_list for the model currently in use; advanced on failure.
current_model_index = 0
llm = HuggingFaceInferenceAPI(
    model_name=model_list[current_model_index],
    token=hf_token,
)
26
 
27
+ # Numerical operation functions (plain definitions)
28
def multiply(a: int, b: int) -> int:
    """Return the product of ``a`` and ``b``."""
    product = a * b
    return product
31
 
32
+ # Changed: removed @tool decorator above each function
33
+
34
def add(a: int, b: int) -> int:
    """Return the sum of ``a`` and ``b``."""
    total = a + b
    return total
37
 
 
38
def subtract(a: int, b: int) -> int:
    """Return ``a`` minus ``b``."""
    difference = a - b
    return difference
41
 
42
def divide(a: int, b: int) -> float:
    """Return ``a`` divided by ``b``.

    Raises:
        ValueError: if ``b`` is zero.
    """
    if b == 0:
        raise ValueError("Cannot divide by zero.")
    quotient = a / b
    return quotient
47
 
 
48
def modulus(a: int, b: int) -> int:
    """Return ``a`` modulo ``b``."""
    remainder = a % b
    return remainder
51
+
52
+ # Web search tool function (plain definition)
53
def web_search(query: str) -> list:
    """Search Tavily for *query* and return up to 3 results as LlamaIndex Documents.

    Args:
        query: The search query string.

    Returns:
        A list of llama_index ``Document`` objects, one per search hit.
    """
    # BaseTool.invoke takes the tool input as its first positional argument;
    # invoke(query=query) raised TypeError ('input' missing).
    results = TavilySearchResults(max_results=3).invoke(query)
    docs = []
    for r in results:
        if isinstance(r, dict):
            # TavilySearchResults returns plain dicts ({"url": ..., "content": ...}),
            # not Document objects, so read keys rather than attributes.
            meta = {"source": r.get("url", ""), "page": ""}
            text = r.get("content", "")
        else:
            # Defensive: handle Document-like results from other tool versions.
            r_meta = getattr(r, "metadata", {}) or {}
            meta = {"source": r_meta.get("source", ""), "page": r_meta.get("page", "")}
            text = getattr(r, "page_content", str(r))
        docs.append(Document(text=text, metadata=meta))
    return docs
61
+
62
+ # Wikipedia search tool function (plain definition)
 
 
 
 
 
 
 
 
 
 
 
63
def wiki_search(query: str) -> list:
    """Search Wikipedia for *query* and return up to 2 results as LlamaIndex Documents."""
    loaded = WikipediaLoader(query=query, load_max_docs=2).load()
    converted = []
    for item in loaded:
        metadata = {
            "source": item.metadata.get("source", ""),
            "page": item.metadata.get("page", ""),
        }
        converted.append(Document(text=item.page_content, metadata=metadata))
    return converted
71
+
72
# Wrap the plain functions as LlamaIndex FunctionTool instances.
web_search_tool = FunctionTool(
    web_search,
    metadata=ToolMetadata(name="web_search", description="Tavily 3-hit search"),
)
wiki_search_tool = FunctionTool(
    wiki_search,
    metadata=ToolMetadata(name="wiki_search", description="Wikipedia 2-hit search"),
)

# Arithmetic tools.
multiply_tool = FunctionTool(
    multiply,
    metadata=ToolMetadata(name="multiply", description="Multiply two numbers."),
)
add_tool = FunctionTool(
    add,
    metadata=ToolMetadata(name="add", description="Add two numbers."),
)
subtract_tool = FunctionTool(
    subtract,
    metadata=ToolMetadata(name="subtract", description="Subtract two numbers."),
)
divide_tool = FunctionTool(
    divide,
    metadata=ToolMetadata(name="divide", description="Divide two numbers."),
)
modulus_tool = FunctionTool(
    modulus,
    metadata=ToolMetadata(name="modulus", description="Modulus operation on two numbers."),
)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
88
# All tools exposed to the agent.
tools = [
    web_search_tool,
    wiki_search_tool,
    multiply_tool,
    add_tool,
    subtract_tool,
    divide_tool,
    modulus_tool,
]

# Build the agent workflow from the tools and the currently selected LLM.
agent = AgentWorkflow.from_tools_or_functions(tools, llm=llm)
101
 
102
def try_next_model() -> bool:
    """Advance to the next model in ``model_list`` and rebuild the LLM and agent.

    Returns:
        True if a fallback model was available and activated, False when the
        list is exhausted.
    """
    global current_model_index, llm, agent
    if current_model_index >= len(model_list) - 1:
        print("No more models to try!")
        return False
    current_model_index += 1
    next_model = model_list[current_model_index]
    print(f"Switching to backup model: {next_model}")
    llm = HuggingFaceInferenceAPI(
        model_name=next_model,
        token=hf_token,
    )
    # Rebuild the agent so it uses the new LLM.
    agent = AgentWorkflow.from_tools_or_functions(tools, llm=llm)
    return True


def run_with_fallback(query: str):
    """Run *query* through the agent, falling back through ``model_list`` on error.

    Each model is attempted at most once. Returns the agent's result, or an
    apology string when every model fails.
    """
    global current_model_index, llm, agent
    for _ in model_list:
        try:
            return agent.run(query)
        except Exception as e:
            print(f"Error with model {model_list[current_model_index]}: {e}")
            # try_next_model was dropped by a previous edit while still being
            # called here; it is restored above so this no longer NameErrors.
            if not try_next_model():
                break
    return "Sorry, encountered issues with all models."