# NOTE: the original paste carried Hugging Face Spaces page chrome here
# ("Spaces:" header and two "Runtime error" status lines) — kept as a comment
# so it no longer breaks the Python file.
# Stdlib imports
import os
import random

# Third-party imports
import gradio as gr
from smolagents import DuckDuckGoSearchTool, GradioUI, CodeAgent, HfApiModel, LiteLLMModel

# Import our custom tools from their modules
from tools import WeatherInfoTool, HubStatsTool, SoccerNewsTool
from retriever import load_guest_dataset

# LLM backend: Gemini 2.0 Flash Lite accessed through LiteLLM.
# NOTE(review): the key is read from GEMINI_API_TOKEN — confirm this matches the
# deployment's secret name (LiteLLM's conventional variable is GEMINI_API_KEY);
# if the variable is unset, api_key silently becomes None.
model = LiteLLMModel(
    model_id="gemini/gemini-2.0-flash-lite",
    api_key=os.getenv("GEMINI_API_TOKEN"),
)

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()

# Initialize the soccer news tool
soccer_news_tool = SoccerNewsTool()

# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, soccer_news_tool, search_tool],
    model=model,
    add_base_tools=True,  # also expose smolagents' default base tools
    planning_interval=3,  # enable planning every 3 steps
)
# ── Gradio UI with memory-toggle --------------------------------------
def chat(query, reset, history):
    """Handle one chat turn for the Gradio UI.

    Args:
        query:   the user's latest message.
        reset:   True → wipe Alfred's conversation memory before answering
                 (forwarded as ``reset=`` to ``alfred.run``).
        history: running list of (user, assistant) tuples shown in the Chatbot.

    Returns:
        A 2-tuple ``("", new_history)``: the empty string clears the input
        textbox, and the extended history updates the chat window.
    """
    answer = alfred.run(query, reset=reset)
    # Build a new list rather than mutating the one Gradio handed us.
    history = history + [(query, answer)]
    return "", history
# Assemble the Blocks UI: chat window, input row, and a one-shot
# "reset memory" checkbox that is un-ticked again after each send.
with gr.Blocks(css="footer {visibility:hidden;}") as demo:
    gr.Markdown("### 🤖 Alfred – a memory-enabled CodeAgent")
    chatbot = gr.Chatbot(height=400)
    with gr.Row():
        txt = gr.Textbox(placeholder="Ask Alfred anything…", scale=5)
        reset_box = gr.Checkbox(label="Reset memory before this message", value=False)
    # On submit: run chat(), then (optionally) un-tick the reset box for the
    # next turn so a reset applies to exactly one message.
    txt.submit(chat, [txt, reset_box, chatbot], [txt, chatbot]) \
        .then(lambda: False, None, reset_box)

if __name__ == "__main__":
    demo.launch()