lwant committed on
Commit
daf4564
Β·
1 Parent(s): 5ab1e4f

Refactor `agent.py` to enhance `Context` usage, remove unused imports and parameters, and disable parallel tool calls in `gaia_solving_agent`.

Browse files
Files changed (1) hide show
  1. src/gaia_solving_agent/agent.py +6 -7
src/gaia_solving_agent/agent.py CHANGED
@@ -1,7 +1,7 @@
1
  from pathlib import Path
2
  from typing import Literal
3
 
4
- from llama_index.core.agent.workflow import FunctionAgent, AgentWorkflow
5
  from llama_index.core.prompts import RichPromptTemplate
6
  from llama_index.llms.nebius import NebiusLLM
7
  from llama_index.tools.requests import RequestsToolSpec
@@ -108,7 +108,6 @@ Stick strictly to the formatting constraints !
108
  async def multi_agent_process(self, ctx: Context, ev: QueryEvent) -> AnswerEvent:
109
  plan = await ctx.get("plan")
110
  additional_file = await ctx.get("additional_file")
111
- additional_file_path = await ctx.get("additional_file_path")
112
 
113
  question = await ctx.get("question")
114
  known_facts = await ctx.get("known_facts")
@@ -127,11 +126,12 @@ The sub-tasks are :
127
  from llama_index.core.memory import ChatMemoryBuffer
128
  memory = ChatMemoryBuffer.from_defaults(token_limit=100000)
129
 
 
 
130
  agent_output = await gaia_solving_agent.run(
131
- user_msg=plan,
132
  memory=memory,
133
- additional_file=additional_file,
134
- additional_file_path=additional_file_path,
135
  )
136
  return AnswerEvent(plan=plan, answer=str(agent_output))
137
 
@@ -208,7 +208,6 @@ gaia_solving_agent = FunctionAgent(
208
  tavily_search_web,
209
  *load_and_search_tools_from_toolspec(WikipediaToolSpec()),
210
  *simple_web_page_reader_toolspec.to_tool_list(),
211
- *RequestsToolSpec().to_tool_list(),
212
  *youtube_transcript_reader_toolspec.to_tool_list(),
213
  ],
214
  llm=get_llm(balanced_model_name),
@@ -228,6 +227,6 @@ gaia_solving_agent = FunctionAgent(
228
  """,
229
  name="gaia_solving_agent",
230
  description="Agent that browse additional information and resources on the web.",
231
- num_concurrent_runs=1,
232
  )
233
 
 
1
  from pathlib import Path
2
  from typing import Literal
3
 
4
+ from llama_index.core.agent.workflow import FunctionAgent
5
  from llama_index.core.prompts import RichPromptTemplate
6
  from llama_index.llms.nebius import NebiusLLM
7
  from llama_index.tools.requests import RequestsToolSpec
 
108
  async def multi_agent_process(self, ctx: Context, ev: QueryEvent) -> AnswerEvent:
109
  plan = await ctx.get("plan")
110
  additional_file = await ctx.get("additional_file")
 
111
 
112
  question = await ctx.get("question")
113
  known_facts = await ctx.get("known_facts")
 
126
  from llama_index.core.memory import ChatMemoryBuffer
127
  memory = ChatMemoryBuffer.from_defaults(token_limit=100000)
128
 
129
+ agent_ctx = Context(gaia_solving_agent)
130
+ await agent_ctx.set("additional_file", additional_file)
131
  agent_output = await gaia_solving_agent.run(
132
+ user_msg=prompt,
133
  memory=memory,
134
+ ctx=agent_ctx,
 
135
  )
136
  return AnswerEvent(plan=plan, answer=str(agent_output))
137
 
 
208
  tavily_search_web,
209
  *load_and_search_tools_from_toolspec(WikipediaToolSpec()),
210
  *simple_web_page_reader_toolspec.to_tool_list(),
 
211
  *youtube_transcript_reader_toolspec.to_tool_list(),
212
  ],
213
  llm=get_llm(balanced_model_name),
 
227
  """,
228
  name="gaia_solving_agent",
229
  description="Agent that browse additional information and resources on the web.",
230
+ allow_parallel_tool_calls=False,
231
  )
232