YeeJun02 committed on
Commit
d0f5a39
·
verified ·
1 Parent(s): ca8cbb1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -7
app.py CHANGED
@@ -30,16 +30,15 @@ def get_tokyo_time() -> str:
30
 
31
  li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]
32
 
33
- # FIX: Passed li_tools as a positional argument (no 'tools=' key)
34
  li_agent = AgentWorkflow.from_tools_or_functions(
35
  li_tools,
36
  llm=li_llm,
37
- system_prompt="You are a helpful assistant with access to a Tokyo time tool."
38
  )
39
 
40
  async def chat_llama(message, history):
41
  try:
42
- # AgentWorkflow.run returns a handler; we await the final result
43
  result = await li_agent.run(user_msg=message)
44
  return str(result)
45
  except Exception as e:
@@ -61,6 +60,7 @@ def weather_tool(location: str) -> str:
61
  """
62
  return f"The weather in {location} is currently sunny and 22°C."
63
 
 
64
  smol_agent = CodeAgent(
65
  model=smol_model,
66
  tools=[weather_tool, DuckDuckGoSearchTool()]
@@ -68,7 +68,7 @@ smol_agent = CodeAgent(
68
 
69
  def chat_smol(message, history):
70
  try:
71
- # smolagents .run() is synchronous by default
72
  response = smol_agent.run(message)
73
  return str(response)
74
  except Exception as e:
@@ -78,14 +78,13 @@ def chat_smol(message, history):
78
  # PART 3: UNIFIED GRADIO UI
79
  # ==========================================
80
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
81
- gr.Markdown("# 🤖 Dual-Agent Testing Space (2026 Edition)")
 
82
 
83
  with gr.Tab("LlamaIndex (Workflow)"):
84
- gr.Markdown("Uses an **Event-Driven Workflow** for reasoning.")
85
  gr.ChatInterface(fn=chat_llama)
86
 
87
  with gr.Tab("smolagents (CodeAgent)"):
88
- gr.Markdown("Uses a **Code-Execution Loop** to solve tasks.")
89
  gr.ChatInterface(fn=chat_smol)
90
 
91
  if __name__ == "__main__":
 
30
 
31
  li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]
32
 
33
+ # Using positional argument for tools list to avoid TypeError
34
  li_agent = AgentWorkflow.from_tools_or_functions(
35
  li_tools,
36
  llm=li_llm,
 
37
  )
38
 
39
  async def chat_llama(message, history):
40
  try:
41
+ # AgentWorkflow requires async execution
42
  result = await li_agent.run(user_msg=message)
43
  return str(result)
44
  except Exception as e:
 
60
  """
61
  return f"The weather in {location} is currently sunny and 22°C."
62
 
63
+ # Tool initialization happens here; ensure ddgs is in requirements.txt
64
  smol_agent = CodeAgent(
65
  model=smol_model,
66
  tools=[weather_tool, DuckDuckGoSearchTool()]
 
68
 
69
  def chat_smol(message, history):
70
  try:
71
+ # CodeAgent .run() is synchronous
72
  response = smol_agent.run(message)
73
  return str(response)
74
  except Exception as e:
 
78
  # PART 3: UNIFIED GRADIO UI
79
  # ==========================================
80
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
81
+ gr.Markdown("# 🤖 Dual-Agent Testing Space")
82
+ gr.Markdown("Comparing **Event-Driven Workflows** (LlamaIndex) vs **Code Execution** (smolagents).")
83
 
84
  with gr.Tab("LlamaIndex (Workflow)"):
 
85
  gr.ChatInterface(fn=chat_llama)
86
 
87
  with gr.Tab("smolagents (CodeAgent)"):
 
88
  gr.ChatInterface(fn=chat_smol)
89
 
90
  if __name__ == "__main__":