# Alfred / app.py — Hugging Face Space application
# (originally uploaded by wishmi1234, commit 957fd82, verified)
import os
from smolagents import CodeAgent, InferenceClientModel
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool, GuestInfoRetrieverTool, LatestNewsTool
from datasets import load_dataset
from langchain_core.documents import Document
import gradio as gr
# --- Model --------------------------------------------------------------
# Fail fast with a clear message instead of an opaque KeyError if the
# Hugging Face token is missing from the environment.
hf_token = os.environ.get("HUGGINGFACE_API_KEY")
if not hf_token:
    raise RuntimeError(
        "HUGGINGFACE_API_KEY environment variable is not set; "
        "it is required to initialize the inference model."
    )
model = InferenceClientModel(token=hf_token)

# --- Tools --------------------------------------------------------------
search_tool = DuckDuckGoSearchTool()    # web search
weather_info_tool = WeatherInfoTool()   # weather lookups
hub_stats_tool = HubStatsTool()         # Hugging Face Hub statistics
latest_news_tool = LatestNewsTool()     # news headlines

# --- Guest-info retrieval tool ------------------------------------------
# Load the gala invitee dataset and wrap each guest "description" field in
# a Document so the retriever tool can search over them.
dataset = load_dataset("agents-course/unit3-invitees")["train"]
docs = [Document(page_content=row["description"]) for row in dataset]
guest_info_tool = GuestInfoRetrieverTool(docs)
# Assemble Alfred, the gala agent, giving it every custom tool above.
alfred_toolbox = [
    guest_info_tool,
    weather_info_tool,
    hub_stats_tool,
    search_tool,
    latest_news_tool,
]
alfred = CodeAgent(
    tools=alfred_toolbox,
    model=model,
    add_base_tools=True,   # also expose smolagents' built-in base tools
    planning_interval=3,   # re-plan every 3 agent steps
)
# Gradio input-output
def greet(name):
    """Forward the user's text query to Alfred and return his reply."""
    response = alfred.run(name)
    return response
# Wire up the Gradio UI: one text input, one text output, handled by greet.
question_box = gr.Textbox(label="Ask Alfred something...")
answer_box = gr.Textbox(label="Alfred's Response")
demo = gr.Interface(
    fn=greet,
    inputs=question_box,
    outputs=answer_box,
    title="🎩 Alfred the Gala Assistant",
    description="Ask about guests, weather, hub stats, and more!",
)
demo.launch()
# # Import necessary libraries
# import os
# import random
# from smolagents import CodeAgent, InferenceClientModel
# # # model = InferenceClientModel(token=os.environ["HUGGINGFACE_API_KEY"])
# # model = InferenceClientModel(
# # model="HuggingFaceH4/zephyr-7b-beta", # A great free chat model
# # token=os.environ["HUGGINGFACE_API_KEY"]
# # )
# model = InferenceClientModel(
# model="HuggingFaceH4/zephyr-7b-beta",
# token=os.environ.get("HUGGINGFACE_API_KEY")
# )
# print("✅ Model initialized:", model) # DEBUG LINES
# # Import our custom tools from their modules
# from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool, GuestInfoRetrieverTool, LatestNewsTool
# import gradio as gr
# # Step 3: Integrate the Tool with Alfred
# # Finally, let’s bring everything together by creating our agent and equipping it with our custom tool:
# # # Initialize the Hugging Face model
# # model = InferenceClientModel()
# # # Create Alfred, our gala agent, with the guest info tool
# # alfred = CodeAgent(tools=[guest_info_tool, search_tool, weather_info_tool, hub_stats_tool, latest_news_tool], model=model)
# # # Example query Alfred might receive during the gala
# # response = alfred.run("What's the latest news about quantum computing?.")
# # print("🎩 Alfred's Response:")
# # print(response)
# # Initialize the Hugging Face model
# # model = InferenceClientModel()
# # # Initialize the web search tool
# search_tool = DuckDuckGoSearchTool()
# # # Initialize the weather tool
# weather_info_tool = WeatherInfoTool()
# # # Initialize the Hub stats tool
# hub_stats_tool = HubStatsTool()
# from datasets import load_dataset
# from langchain_core.documents import Document
# # Load the dataset from Hugging Face
# dataset = load_dataset("agents-course/unit3-invitees")["train"]
# # Create Document objects from the "info" column
# docs = [Document(page_content=row["description"]) for row in dataset]
# # Initialize the tool
# guest_info_tool = GuestInfoRetrieverTool(docs)
# # guest_info_tool = GuestInfoRetrieverTool(docs)
# # # Load the guest dataset and initialize the guest info tool
# latest_news_tool = LatestNewsTool()
# # Create Alfred with all the tools
# alfred = CodeAgent(
# tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool, latest_news_tool],
# model=model,
# add_base_tools=True, # Add any additional base tools
# planning_interval=3 # Enable planning every 3 steps
# )
# query = "I need to speak with Dr. Nikola Tesla about recent advancements in wireless energy. Can you help me prepare for this conversation?"
# response = alfred.run(query)
# # print("🎩 Alfred's Response:")
# # print(response)
# # def greet(name):
# # return response
# # # return "Hello " + name + "!!"
# # demo = gr.Interface(fn=greet, inputs="text", outputs="text")
# # demo.launch()
# # DEBUG LINES
# query = "I need to speak with Dr. Nikola Tesla about recent advancements in wireless energy. Can you help me prepare for this conversation?"
# response = alfred.run(query)
# print(response)