File size: 2,102 Bytes
9a62fc6
5e50775
38812af
50edfc3
38812af
 
ac74b6c
38812af
 
 
5e50775
 
 
 
38812af
 
 
 
 
 
 
 
 
 
 
 
 
ac74b6c
 
 
38812af
 
ac74b6c
38812af
 
 
 
9a62fc6
fa432cf
 
 
 
 
 
 
02edf81
fa432cf
 
 
 
 
 
 
 
 
 
 
 
501c4dc
fa432cf
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import gradio as gr
import os
import random
from smolagents import DuckDuckGoSearchTool, GradioUI, CodeAgent, HfApiModel, LiteLLMModel

# Import our custom tools from their modules
from tools import WeatherInfoTool, HubStatsTool, SoccerNewsTool
from retriever import load_guest_dataset

# Initialize the LLM backend.
# NOTE(review): despite the import of HfApiModel above, this actually uses a
# Gemini model routed through LiteLLM, not a Hugging Face endpoint.
model = LiteLLMModel(
    model_id="gemini/gemini-2.0-flash-lite",
    api_key=os.getenv("GEMINI_API_TOKEN")  # returns None if the env var is unset — verify it is exported
)

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()

# Initialize the soccer news tool
soccer_news_tool = SoccerNewsTool()

# Create Alfred with all the tools.
# Gather every tool instance in one place so the agent call stays readable.
alfred_toolset = [
    guest_info_tool,
    weather_info_tool,
    hub_stats_tool,
    soccer_news_tool,
    search_tool,
]
alfred = CodeAgent(
    tools=alfred_toolset,
    model=model,
    add_base_tools=True,   # also expose smolagents' built-in base tools
    planning_interval=3,   # re-plan after every 3 steps
)

# ── Gradio UI with memory-toggle --------------------------------------
def chat(query, reset, history):
    """Run one chat turn through Alfred and update the chat transcript.

    Args:
        query: The user's latest message.
        reset: If True, wipe the agent's conversation memory before answering.
        history: Running list of (user, assistant) tuples for the Chatbot.

    Returns:
        A pair ("", new_history): the empty string clears the input textbox,
        and new_history is the transcript with this turn appended.
    """
    reply = alfred.run(query, reset=reset)
    new_history = [*history, (query, reply)]
    # Empty string clears the textbox; new_history refreshes the chat window.
    return "", new_history

# Build the Gradio interface: a chat window, a message box, and a checkbox
# that lets the user wipe Alfred's memory before the next message.
with gr.Blocks(css="footer {visibility:hidden;}") as demo:
    gr.Markdown("### 🤖 Alfred – a memory-enabled CodeAgent")
    chat_window = gr.Chatbot(height=400)
    with gr.Row():
        message_box = gr.Textbox(placeholder="Ask Alfred anything…", scale=5)
        reset_toggle = gr.Checkbox(label="Reset memory before this message", value=False)
    # Submitting the textbox runs the agent, then un-ticks the reset checkbox
    # so memory is only wiped for the one message the user asked about.
    submit_event = message_box.submit(
        chat,
        [message_box, reset_toggle, chat_window],
        [message_box, chat_window],
    )
    submit_event.then(lambda: False, None, reset_toggle)

if __name__ == "__main__":
    # Launch the Gradio app only when run as a script, not on import.
    demo.launch()