import asyncio
import datetime
import os
import zoneinfo

import gradio as gr
import pytz

from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.core.tools import FunctionTool
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

from smolagents import CodeAgent, DuckDuckGoSearchTool, tool, InferenceClientModel
|
|
| |
# Hugging Face API token read from the environment; passed to both
# inference clients below for authentication.
HF_TOKEN = os.getenv("HF_TOKEN")

# Single model id shared by both agent frameworks.
MODEL_ID = "Qwen/Qwen2.5-7B-Instruct"


# LlamaIndex LLM wrapper over the Hugging Face Inference API,
# routed through the "together" provider.
li_llm = HuggingFaceInferenceAPI(
    model_name=MODEL_ID,
    token=HF_TOKEN,
    provider="together"
)
|
|
def get_tokyo_time() -> str:
    """Returns the current time in Tokyo, Japan."""
    # stdlib zoneinfo (Python 3.9+) replaces the third-party pytz dependency;
    # datetime.now(tz) yields a timezone-aware datetime directly.
    tz = zoneinfo.ZoneInfo('Asia/Tokyo')
    return f"The current time in Tokyo is {datetime.datetime.now(tz).strftime('%H:%M:%S')}"
|
|
# Expose get_tokyo_time as a LlamaIndex tool; FunctionTool derives the
# tool name and description from the function and its docstring.
li_tools = [FunctionTool.from_defaults(fn=get_tokyo_time)]


# Single-agent workflow built from the tool list and LLM above.
li_agent = AgentWorkflow.from_tools_or_functions(
    li_tools,
    llm=li_llm,
)
|
|
async def chat_llama(message, history):
    """Gradio chat handler backed by the LlamaIndex AgentWorkflow.

    Returns the agent's answer as a string, or an error string if the
    workflow raises.
    """
    try:
        return str(await li_agent.run(user_msg=message))
    except Exception as exc:
        return f"LlamaIndex Error: {str(exc)}"
|
|
| |
| |
| |
# smolagents model wrapper over the same MODEL_ID/token/provider used by
# the LlamaIndex side, so both tabs talk to the same backend model.
smol_model = InferenceClientModel(
    model_id=MODEL_ID,
    token=HF_TOKEN,
    provider="together"
)
|
|
@tool
def weather_tool(location: str) -> str:
    """Get the current weather for a location.
    Args:
        location: The city name.
    """
    # Stub implementation: always reports the same canned conditions.
    forecast = "sunny and 22°C"
    return f"The weather in {location} is currently {forecast}."
|
|
# Code-writing agent armed with the stub weather tool and live web search.
smol_agent = CodeAgent(
    model=smol_model,
    tools=[weather_tool, DuckDuckGoSearchTool()]
)
|
|
def chat_smol(message, history):
    """Gradio chat handler that delegates to the smolagents CodeAgent.

    Returns the agent's answer as a string, or an error string if the
    run raises.
    """
    try:
        return str(smol_agent.run(message))
    except Exception as exc:
        return f"Smolagents Error: {str(exc)}"
|
|
| |
| |
| |
# Gradio UI: one chat tab per agent framework, both using the same model.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Consolidated AI Agent Space")
    gr.Markdown(f"Currently using **{MODEL_ID}** via Together AI Provider.")

    # ChatInterface accepts the async chat_llama handler directly.
    with gr.Tab("LlamaIndex (Workflow)"):
        gr.ChatInterface(fn=chat_llama)

    with gr.Tab("smolagents (CodeAgent)"):
        gr.ChatInterface(fn=chat_smol)
|
|
# Script entry point: start the Gradio server.
if __name__ == "__main__":
    demo.launch()