# Hugging Face Spaces status header (extraction residue — not source code): Space was "Sleeping".
| import gradio as gr | |
| import os | |
| import pandas as pd | |
| import datasets | |
| from smolagents import CodeAgent, OpenAIServerModel | |
| from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool, NewsSearchTool | |
| from retriever import load_guest_dataset | |
# Constants
SAMPLE_FILE = "sample_guests.csv"


def generate_sample_guest_csv():
    """Materialize the sample guest list as SAMPLE_FILE if it is missing.

    Downloads the `agents-course/unit3-invitees` train split and writes it
    to CSV so the UI can offer it as a downloadable example file.
    """
    if os.path.exists(SAMPLE_FILE):
        return  # already generated on a previous run
    guests = datasets.load_dataset("agents-course/unit3-invitees", split="train")
    pd.DataFrame(guests).to_csv(SAMPLE_FILE, index=False)


generate_sample_guest_csv()
# Set up model — single OpenAI-compatible model shared by every agent built below.
model = OpenAIServerModel(model_id="gpt-4o")

# Initialize tools (instantiated once at import time and reused across agents).
search_tool = DuckDuckGoSearchTool()
weather_info_tool = WeatherInfoTool()
hub_stats_tool = HubStatsTool()
# NOTE(review): api_key is None if CONTEXTUALWEB_API_KEY is unset —
# presumably NewsSearchTool handles that; verify against tools.py.
news_tool = NewsSearchTool(api_key=os.getenv("CONTEXTUALWEB_API_KEY"))
# Dynamically create agent with selected guest file
def build_agent(file_path=None):
    """Build a fresh CodeAgent whose guest-info tool reads from *file_path*.

    When *file_path* is None the retriever falls back to its default dataset.
    """
    guest_tool = load_guest_dataset(file_path=file_path)
    toolset = [guest_tool, weather_info_tool, hub_stats_tool, search_tool, news_tool]
    return CodeAgent(
        tools=toolset,
        model=model,
        add_base_tools=True,
        planning_interval=3,  # agent re-plans every 3 execution steps
    )
# Agent instance placeholder. Rebuilt on every query so a newly uploaded
# guest file takes effect immediately.
agent_instance = None

# Gradio UI
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("## Agent interface")
            gr.Markdown("This web UI allows you to interact with a `smolagents` agent that can use tools and execute steps to complete tasks.")
            # Labels below restore emoji that were mojibake in the original file.
            gr.File(value=SAMPLE_FILE, label="📥 Download sample_guests.csv", interactive=False)
            guest_file = gr.File(label="📤 Upload guest CSV/JSON", type="filepath", file_types=[".csv", ".json"])
            prompt = gr.Textbox(label="Your request")
            submit = gr.Button("Submit")
            example_prompts = [
                ["List guests"],
                ["Give some examples of conversation starters based on each guest's interests?"],
                ["What's the weather like in Amsterdam tonight? Will it be suitable for our fireworks display?"],
                ["One of our guests is from Qwen. What can you tell me about their most popular model?"],
                ["I need to speak with Dr. Nikola Tesla about recent advancements in wireless energy. Can you help me prepare for this conversation?"],
            ]
            gr.Examples(
                examples=example_prompts,
                inputs=[prompt],
                label="💡 Example Prompts sample_guests.csv",
            )
            gr.Markdown("Powered by **smolagents**")
        with gr.Column(scale=3):
            output = gr.Chatbot(label="Agent", type="messages")

    def run_query(prompt, file, history):
        """Run the agent on *prompt* and append the exchange to *history*.

        Parameters
        ----------
        prompt : str
            The user's request; blank prompts are ignored.
        file : str | None
            Filepath of an uploaded guest CSV/JSON, or None for the default.
        history : list[dict] | None
            Existing chatbot messages (role/content dicts).

        Returns
        -------
        list[dict]
            The updated message list for the ``type="messages"`` chatbot.
        """
        global agent_instance
        history = list(history or [])
        # FIX: previously only the latest exchange was returned, so every
        # submit wiped the visible conversation; now we append to history.
        if not prompt or not prompt.strip():
            return history  # nothing to do on an empty prompt
        # Rebuild so a freshly uploaded guest file is picked up immediately.
        agent_instance = build_agent(file_path=file)
        result = agent_instance.run(prompt)
        # Normalize different result types into one display string.
        if isinstance(result, dict):
            result = "\n\n".join(f"**{k}**: {v}" for k, v in result.items())
        elif isinstance(result, list):
            if all(isinstance(item, dict) and "name" in item and "starter" in item for item in result):
                result = "\n\n".join(f"{item['name']}: {item['starter']}" for item in result)
            else:
                result = str(result)
        else:
            result = str(result)
        history.append({"role": "user", "content": prompt})
        history.append({"role": "assistant", "content": result})
        return history

    # The chatbot value is passed back in so the conversation accumulates.
    submit.click(fn=run_query, inputs=[prompt, guest_file, output], outputs=output)

if __name__ == "__main__":
    demo.launch()