YeeJun02 commited on
Commit
0982499
·
verified ·
1 Parent(s): c497a87

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +65 -79
app.py CHANGED
@@ -1,105 +1,91 @@
1
  import os
 
2
  import datetime
3
  import pytz
4
- import math
5
- import requests
6
- import gradio as gr
7
- from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
8
- from deep_translator import GoogleTranslator
9
- from llama_index.core.agent import ReActAgent
10
  from llama_index.core.tools import FunctionTool
11
  from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
12
 
13
- # ============ 1. SMOLAGENTS TOOLS ============
 
14
 
15
- @tool
16
- def time_tool(timezone: str = "UTC") -> str:
17
- """
18
- Get current time in a specific timezone.
19
- Args:
20
- timezone: The timezone to check (e.g., 'Asia/Tokyo').
21
- """
22
- try:
23
- tz = pytz.timezone(timezone)
24
- now = datetime.datetime.now(tz)
25
- return f"⏰ Time in {timezone}: {now.strftime('%Y-%m-%d %H:%M:%S')}"
26
- except Exception as e:
27
- return f"Error: {str(e)}"
28
 
29
- @tool
30
- def translator_tool(text: str, target_language: str) -> str:
31
- """
32
- Translates text into a specified language.
33
- Args:
34
- text: The text or phrase to translate.
35
- target_language: The destination language (e.g., 'french', 'german').
36
- """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
  try:
38
- translation = GoogleTranslator(source='auto', target=target_language).translate(text)
39
- return f"Translated to {target_language.title()}: {translation}"
40
  except Exception as e:
41
- return f"Translation error: {str(e)}"
 
 
 
 
 
 
 
 
 
42
 
43
  @tool
44
  def weather_tool(location: str) -> str:
45
- """
46
- Get the current weather for any location worldwide.
47
  Args:
48
- location: The name of the city or place (e.g., 'London' or 'Tokyo').
49
  """
50
- try:
51
- geo_url = f"https://geocoding-api.open-meteo.com/v1/search?name={location}&count=1&language=en&format=json"
52
- geo_res = requests.get(geo_url).json()
53
- if not geo_res.get('results'): return "Location not found."
54
- data = geo_res['results'][0]
55
- weather_url = f"https://api.open-meteo.com/v1/forecast?latitude={data['latitude']}&longitude={data['longitude']}&current=temperature_2m"
56
- w_res = requests.get(weather_url).json()
57
- return f"🌤️ {data['name']}: {w_res['current']['temperature_2m']}°C"
58
- except Exception as e:
59
- return f"Error: {str(e)}"
60
-
61
- # ============ AGENT CONFIGURATION ============
62
-
63
- HF_TOKEN = os.getenv("HF_TOKEN", "")
64
- smol_model = InferenceClientModel(model_id="Qwen/Qwen2.5-Coder-32B-Instruct", token=HF_TOKEN)
65
 
66
  smol_agent = CodeAgent(
67
  model=smol_model,
68
- tools=[DuckDuckGoSearchTool(), time_tool, translator_tool, weather_tool],
69
- max_steps=5
70
  )
71
 
72
- # LlamaIndex Setup
73
- llama_llm = HuggingFaceInferenceAPI(
74
- model_name="Qwen/Qwen2.5-7B-Instruct",
75
- token=HF_TOKEN,
76
- task="conversational"
77
- )
78
-
79
- llama_agent = ReActAgent.from_tools(
80
- [
81
- FunctionTool.from_defaults(fn=lambda: f"Tokyo: {datetime.datetime.now(pytz.timezone('Asia/Tokyo'))}", name="get_tokyo_time"),
82
- FunctionTool.from_defaults(fn=lambda a, b: a * b, name="multiply")
83
- ],
84
- llm=llama_llm,
85
- verbose=True
86
- )
87
-
88
- # ============ GRADIO UI ============
89
-
90
  def chat_smol(message, history):
91
- return str(smol_agent.run(message))
92
-
93
- def chat_llama(message, history):
94
- response = llama_agent.chat(message)
95
- return str(response)
96
 
 
 
 
97
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
98
- gr.Markdown("# 🤖 Multi-Framework AI Agent Space")
99
- with gr.Tab("Smolagents (Alfred)"):
100
- gr.ChatInterface(chat_smol)
101
- with gr.Tab("LlamaIndex (ReAct)"):
102
- gr.ChatInterface(chat_llama)
 
 
 
103
 
104
  if __name__ == "__main__":
105
  demo.launch()
 
1
  import os
2
+ import gradio as gr
3
  import datetime
4
  import pytz
5
+ import asyncio
6
+
7
+ # Framework 1: LlamaIndex
8
+ from llama_index.core.agent.workflow import AgentWorkflow
 
 
9
  from llama_index.core.tools import FunctionTool
10
  from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
11
 
12
+ # Framework 2: smolagents
13
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
14
 
15
# 0. SHARED CONFIG
# NOTE(review): os.getenv returns None when the HF_TOKEN secret is unset;
# both client constructors below receive that value as token= — confirm the
# Space has the secret configured, otherwise requests go out anonymously.
HF_TOKEN = os.getenv("HF_TOKEN")
# Single model id shared by both agent frameworks below.
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"
 
 
 
 
 
 
 
 
 
19
 
20
# ==========================================
# PART 1: LLAMAINDEX AGENT
# ==========================================
# LLM handle for LlamaIndex, routed through the Hugging Face Inference API.
li_llm = HuggingFaceInferenceAPI(
    model_name=MODEL_ID,
    token=HF_TOKEN,
    # NOTE(review): pins the "together" inference provider — verify this
    # model is actually served there, or remove to let HF auto-select.
    provider="together"
)
28
+
29
def get_tokyo_time() -> str:
    """Return the current wall-clock time in Tokyo, Japan.

    Returns:
        A human-readable sentence containing the current Tokyo time
        formatted as HH:MM:SS.
    """
    # stdlib zoneinfo (Python 3.9+) replaces the legacy third-party pytz
    # API for a simple "now in zone" lookup; the function-local import
    # keeps this block self-contained.
    from zoneinfo import ZoneInfo

    tz = ZoneInfo("Asia/Tokyo")
    return f"The current time in Tokyo is {datetime.datetime.now(tz).strftime('%H:%M:%S')}"
33
+
34
# Wrap the plain function as a LlamaIndex tool; the tool's name and
# description are derived from the function name and docstring.
li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]

# Single-agent workflow: the LLM decides when (if at all) to call the tool.
li_agent = AgentWorkflow.from_tools_or_functions(
    li_tools,
    llm=li_llm,
)
40
+
41
async def chat_llama(message, history):
    """Gradio chat callback backed by the LlamaIndex agent workflow.

    Args:
        message: The user's latest chat message.
        history: Prior chat turns supplied by gr.ChatInterface (unused).

    Returns:
        The agent's reply as a string, or an error description if the
        workflow run raises.
    """
    try:
        workflow_output = await li_agent.run(user_msg=message)
        return str(workflow_output)
    except Exception as exc:  # surface failures in the chat instead of crashing the UI
        return f"LlamaIndex Error: {str(exc)}"
47
+
48
# ==========================================
# PART 2: SMOLAGENTS
# ==========================================
# Same model id and provider as the LlamaIndex side, via smolagents'
# inference-client wrapper.
smol_model = InferenceClientModel(
    model_id=MODEL_ID,
    token=HF_TOKEN,
    provider="together"
)
56
 
57
@tool
def weather_tool(location: str) -> str:
    """Get the current weather for a location.

    Args:
        location: The city name.
    """
    # NOTE(review): stubbed response — no real weather API is called; every
    # location reports the same canned answer. Replace with a live lookup
    # (and keep the Args docstring section, which smolagents parses) if
    # real data is needed.
    return f"The weather in {location} is currently sunny and 22°C."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
64
 
65
# CodeAgent plans by writing and executing Python snippets; it can call the
# local weather stub or the DuckDuckGo search tool.
smol_agent = CodeAgent(
    model=smol_model,
    tools=[weather_tool, DuckDuckGoSearchTool()]
)
69
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
def chat_smol(message, history):
    """Gradio chat callback backed by the smolagents CodeAgent.

    Args:
        message: The user's latest chat message.
        history: Prior chat turns supplied by gr.ChatInterface (unused).

    Returns:
        The agent's answer as a string, or an error description on failure.
    """
    try:
        agent_answer = smol_agent.run(message)
        return str(agent_answer)
    except Exception as failure:  # keep the UI responsive; report the error in-chat
        return f"Smolagents Error: {str(failure)}"
76
 
77
# ==========================================
# PART 3: UNIFIED GRADIO UI
# ==========================================
# One tab per framework; each tab hosts its own independent chat loop.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Consolidated AI Agent Space")
    gr.Markdown(f"Currently using **{MODEL_ID}** via Together AI Provider.")

    with gr.Tab("LlamaIndex (Workflow)"):
        # chat_llama is async; Gradio awaits coroutine callbacks natively.
        gr.ChatInterface(fn=chat_llama)

    with gr.Tab("smolagents (CodeAgent)"):
        gr.ChatInterface(fn=chat_smol)

# Launch only when executed as a script (HF Spaces runs app.py directly).
if __name__ == "__main__":
    demo.launch()