# combine / app.py — Hugging Face Space entry point
# Last change: "Update app.py" by Jovynne (commit c8bb330, verified)
import os
import gradio as gr
import datetime
import pytz
import asyncio
# Framework 1: LlamaIndex
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.tools import FunctionTool
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
# Framework 2: smolagents
from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
# 0. SHARED CONFIG
# Hugging Face API token read from the environment (None if unset); both
# frameworks below use it to authenticate inference calls.
HF_TOKEN = os.getenv("HF_TOKEN")
# 7B is the sweet spot for free serverless inference in 2026
# Single shared model ID so both agents run the same backend model.
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"
# ==========================================
# PART 1: LLAMAINDEX AGENT
# ==========================================
# LlamaIndex LLM wrapper over the HF Inference API, routed through the
# "together" provider. NOTE(review): assumes HF_TOKEN has access to the
# Together provider — confirm in the Space settings.
li_llm = HuggingFaceInferenceAPI(
model_name=MODEL_ID,
token=HF_TOKEN,
provider="together"
)
def get_tokyo_time() -> str:
    """Return the current wall-clock time in Tokyo, Japan.

    Returns:
        A human-readable sentence containing the current HH:MM:SS
        in the Asia/Tokyo timezone.
    """
    # zoneinfo (stdlib since 3.9) supersedes pytz and avoids pytz's
    # localize() pitfalls; the formatted output is identical.
    from zoneinfo import ZoneInfo

    now = datetime.datetime.now(ZoneInfo("Asia/Tokyo"))
    return f"The current time in Tokyo is {now.strftime('%H:%M:%S')}"
# Expose the plain function to LlamaIndex as a callable tool (name and
# description are derived from the function signature and docstring).
li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]
# Single-agent workflow: the LLM decides when to invoke the tool.
li_agent = AgentWorkflow.from_tools_or_functions(
li_tools,
llm=li_llm,
)
async def chat_llama(message, history):
    """Gradio chat handler backed by the LlamaIndex AgentWorkflow.

    Returns the agent's answer as a string, or an error description so
    failures surface in the chat UI instead of crashing it.
    """
    try:
        workflow_result = await li_agent.run(user_msg=message)
    except Exception as exc:  # boundary handler: report, don't crash the UI
        return f"LlamaIndex Error: {str(exc)}"
    return str(workflow_result)
# ==========================================
# PART 2: SMOLAGENTS
# ==========================================
# smolagents inference client pointed at the same model and provider as
# the LlamaIndex agent above.
smol_model = InferenceClientModel(
model_id=MODEL_ID,
token=HF_TOKEN,
provider="together"
)
@tool
def weather_tool(location: str) -> str:
    """Get the current weather for a location.

    Args:
        location: The city name.
    """
    # Static placeholder — no real weather API is called; every location
    # gets the same canned answer. The docstring above doubles as the tool
    # description smolagents shows the LLM, so keep the Args section intact.
    return f"The weather in {location} is currently sunny and 22°C."
# CodeAgent writes and executes Python to call its tools; it gets the mock
# weather tool plus live DuckDuckGo web search.
smol_agent = CodeAgent(
model=smol_model,
tools=[weather_tool, DuckDuckGoSearchTool()]
)
def chat_smol(message, history):
    """Gradio chat handler that delegates to the smolagents CodeAgent.

    Returns the agent's answer as a string, or an error description so
    failures surface in the chat UI instead of crashing it.
    """
    try:
        agent_answer = smol_agent.run(message)
    except Exception as exc:  # boundary handler: report, don't crash the UI
        return f"Smolagents Error: {str(exc)}"
    return str(agent_answer)
# ==========================================
# PART 3: UNIFIED GRADIO UI
# ==========================================
# Two tabs, one per framework, each backed by its own chat handler.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Consolidated AI Agent Space")
    gr.Markdown(f"Currently using **{MODEL_ID}** via Together AI Provider.")
    with gr.Tab("LlamaIndex (Workflow)"):
        # ChatInterface accepts async handlers, so chat_llama works directly.
        gr.ChatInterface(fn=chat_llama)
    with gr.Tab("smolagents (CodeAgent)"):
        gr.ChatInterface(fn=chat_smol)
if __name__ == "__main__":
    demo.launch()