Wayne0102 committed on
Commit
2c00ca8
·
verified ·
1 Parent(s): 5f6bf5a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +66 -43
app.py CHANGED
@@ -12,71 +12,94 @@ from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
12
  # Framework 2: smolagents
13
  from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
14
 
15
# 0. CONFIG
# Shared credentials and model selection used by both agent frameworks.
HF_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"
 
19
# ==========================================
# AGENTS SETUP
# ==========================================

# LlamaIndex Setup: route LLM calls through the HF Inference API
# (Together as the serving provider).
li_llm = HuggingFaceInferenceAPI(model_name=MODEL_ID, token=HF_TOKEN, provider="together")
25
def get_tokyo_time() -> str:
    """Returns the current time in Tokyo, Japan."""
    # Localize "now" to JST before formatting as HH:MM:SS.
    jst_now = datetime.datetime.now(pytz.timezone('Asia/Tokyo'))
    return f"The current time in Tokyo is {jst_now.strftime('%H:%M:%S')}"
29
 
30
def get_weather_li(location: str) -> str:
    """Report the current weather for a location (canned demo data).

    Args:
        location: City name to report on.

    Returns:
        A fixed weather string mentioning the location.
    """
    # No real weather API is queried — this is a tool-calling demo.
    report = f"LlamaIndex reports: The weather in {location} is 22°C and sunny."
    return report
 
 
 
 
 
33
 
34
# Wrap both plain functions as LlamaIndex tools, then build the workflow agent.
_li_tools = [
    FunctionTool.from_defaults(fn=get_tokyo_time),
    FunctionTool.from_defaults(fn=get_weather_li),
]
li_agent = AgentWorkflow.from_tools_or_functions(_li_tools, llm=li_llm)
 
 
38
 
39
# smolagents Setup: same hosted model, wrapped in the smolagents client.
smol_model = InferenceClientModel(model_id=MODEL_ID, token=HF_TOKEN, provider="together")
 
41
@tool
def weather_tool(location: str) -> str:
    """Get current weather. Args: location: city name."""
    # Canned demo payload; the docstring above is parsed by smolagents
    # to build the tool description shown to the model.
    return f"smolagents reports: The weather in {location} is 22°C and sunny."
45
 
46
# CodeAgent solves tasks by writing and executing Python code.
smol_agent = CodeAgent(
    model=smol_model,
    tools=[weather_tool, DuckDuckGoSearchTool()],
)
 
 
 
47
 
48
# ==========================================
# CHAT FUNCTIONS
# ==========================================
async def chat_llama(message, history):
    """Gradio chat callback: forward *message* to the LlamaIndex agent."""
    workflow_result = await li_agent.run(user_msg=message)
    return str(workflow_result)
54
 
55
def chat_smol(message, history):
    """Gradio chat callback: forward *message* to the smolagents CodeAgent."""
    answer = smol_agent.run(message)
    return str(answer)
 
 
 
 
57
 
58
# ==========================================
# IMPROVED UI
# ==========================================
# Layout: a centered header row, a narrow sidebar listing each framework's
# tools, and a wide tabbed chat area (one tab per framework).
_CUSTOM_CSS = ".agent-header {text-align:center} .sidebar-info {font-size: 0.9em}"
_LI_EXAMPLES = ["How is the weather in Tokyo?", "What time is it in Tokyo?"]
_SMOL_EXAMPLES = ["Search for 2026 AI trends", "What is the weather in London?"]

with gr.Blocks(theme=gr.themes.Soft(), css=_CUSTOM_CSS) as demo:
    with gr.Row(elem_classes="agent-header"):
        gr.Markdown(f"# 🤖 Agentic Framework Workspace\n**Backend:** {MODEL_ID} via Together AI")

    with gr.Row():
        # Sidebar: tool inventory per framework.
        with gr.Column(scale=1):
            with gr.Group():
                gr.Markdown("### 🛠️ Available Tools")
                gr.Markdown("**LlamaIndex:**\n- `tokyo_time`\n- `get_weather`", elem_classes="sidebar-info")
                gr.Markdown("**smolagents:**\n- `weather_tool`\n- `DuckDuckGo Search`\n- `Python Interpreter`", elem_classes="sidebar-info")
                gr.Info("LlamaIndex uses Event-Driven Workflows, while smolagents generates and executes Python code directly.")

        # Main area: one chat tab per agent framework.
        with gr.Column(scale=4):
            with gr.Tabs():
                with gr.Tab("🏗️ LlamaIndex (Workflow)"):
                    gr.ChatInterface(fn=chat_llama, examples=_LI_EXAMPLES)

                with gr.Tab("💻 smolagents (CodeAgent)"):
                    gr.ChatInterface(fn=chat_smol, examples=_SMOL_EXAMPLES)
80
 
81
if __name__ == "__main__":
    # Start the Gradio server when executed as a script.
    demo.launch()
 
 
 
 
 
12
  # Framework 2: smolagents
13
  from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
14
 
15
# 0. SHARED CONFIG
# Both frameworks talk to the same hosted model via the Together provider.
HF_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"
18
 
19
# ==========================================
# PART 1: LLAMAINDEX AGENT
# ==========================================
# LLM wrapper consumed by the LlamaIndex workflow below.
li_llm = HuggingFaceInferenceAPI(model_name=MODEL_ID, token=HF_TOKEN, provider="together")
23
+
24
def get_tokyo_time() -> str:
    """Return a sentence with the current wall-clock time in Tokyo, Japan.

    Uses the stdlib ``zoneinfo`` database instead of the third-party
    ``pytz`` package; Asia/Tokyo has a fixed UTC+9 offset (no DST), so
    the rendered time is identical.
    """
    from zoneinfo import ZoneInfo  # local import: removes the pytz dependency
    now_tokyo = datetime.datetime.now(ZoneInfo("Asia/Tokyo"))
    return f"The current time in Tokyo is {now_tokyo.strftime('%H:%M:%S')}"
28
 
29
def get_weather(location: str) -> str:
    """
    Get the current weather for a specific location.

    Args:
        location: The city name to check weather for.
    """
    # Canned demo response — no external weather service is called.
    # The docstring above doubles as the tool description for the LLM.
    report = f"LlamaIndex Tool: The weather in {location} is currently sunny and 22°C."
    return report
37
 
38
# Register both plain functions as LlamaIndex tools, then assemble the
# event-driven agent workflow around them.
li_tools = [
    FunctionTool.from_defaults(fn=get_tokyo_time),
    FunctionTool.from_defaults(fn=get_weather),  # weather tool included here
]

li_agent = AgentWorkflow.from_tools_or_functions(li_tools, llm=li_llm)
44
 
45
async def chat_llama(message, history):
    """Gradio chat handler that forwards *message* to the LlamaIndex agent.

    Any failure is rendered as an error string in the chat window rather
    than crashing the UI.
    """
    try:
        workflow_output = await li_agent.run(user_msg=message)
        return str(workflow_output)
    except Exception as e:
        return f"❌ LlamaIndex Error: {str(e)}"
51
+
52
# ==========================================
# PART 2: SMOLAGENTS
# ==========================================
# Same hosted model, wrapped in the smolagents inference client.
smol_model = InferenceClientModel(model_id=MODEL_ID, token=HF_TOKEN, provider="together")
56
+
57
@tool
def weather_tool(location: str) -> str:
    """
    Get the current weather for a location.

    Args:
        location: The city and country, e.g., 'London, UK'.
    """
    # Static demo payload; swap in a real API call for production use.
    # NOTE: the docstring is parsed by the smolagents @tool decorator, so
    # its summary and Args section must stay in this shape.
    return f"smolagents Tool: The weather in {location} is currently sunny and 22°C."
66
 
67
# CodeAgent writes and executes Python to solve tasks; add_base_tools=True
# supplies the built-in toolbox alongside our custom weather/search tools.
smol_agent = CodeAgent(
    model=smol_model,
    tools=[weather_tool, DuckDuckGoSearchTool()],
    add_base_tools=True,
)
 
72
 
73
def chat_smol(message, history):
    """Gradio chat handler that forwards *message* to the smolagents CodeAgent.

    Any failure is rendered as an error string in the chat window rather
    than crashing the UI.
    """
    try:
        return str(smol_agent.run(message))
    except Exception as e:
        return f"❌ Smolagents Error: {str(e)}"
79
 
80
# ==========================================
# PART 3: GRADIO 6.0 UI
# ==========================================
# A title, a backend banner, and one chat tab per agent framework.
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Consolidated AI Agent Space", elem_id="main-title")
    gr.Markdown(f"Currently using **{MODEL_ID}** via Together AI Provider.")

    with gr.Tabs():
        with gr.Tab("🏗️ LlamaIndex (Workflow)"):
            gr.ChatInterface(
                fn=chat_llama,
                examples=["What's the weather in Tokyo?", "What time is it in Japan?"],
            )

        with gr.Tab("💻 smolagents (CodeAgent)"):
            gr.ChatInterface(
                fn=chat_smol,
                examples=["Search for the latest AI news", "How is the weather in Paris?"],
            )
 
 
 
99
 
100
if __name__ == "__main__":
    # Gradio 6.0: theme and css are passed to launch() instead of Blocks().
    demo.launch(
        theme=gr.themes.Soft(),
        css="#main-title { text-align: center; margin-bottom: 20px; }",
    )
+ )