# NOTE(review): the following lines are Hugging Face Spaces file-viewer metadata
# (Space status, file size, commit hashes, rendered line numbers) that leaked into
# the source during extraction. They are not part of the program; kept here as a
# comment so the file remains valid Python.
# Spaces: Sleeping / Sleeping | File size: 4,380 Bytes
# Commit hashes: 0f8df08 5eff7df 957fd82 a74c27b f41bbe8 efc26d6 0604af9 1b74e60 ea6018b 2bde7f3
import os
from smolagents import CodeAgent, InferenceClientModel
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool, GuestInfoRetrieverTool, LatestNewsTool
from datasets import load_dataset
from langchain_core.documents import Document
import gradio as gr
# --- Model -------------------------------------------------------------------
# Hugging Face Inference API backend; the token is read from the environment
# (raises KeyError at startup if HUGGINGFACE_API_KEY is unset).
model = InferenceClientModel(token=os.environ["HUGGINGFACE_API_KEY"])

# --- Guest-info retrieval tool -----------------------------------------------
# Build retrieval documents from the invitee dataset's "description" column.
dataset = load_dataset("agents-course/unit3-invitees")["train"]
docs = [Document(page_content=record["description"]) for record in dataset]
guest_info_tool = GuestInfoRetrieverTool(docs)

# --- Remaining tools ----------------------------------------------------------
search_tool = DuckDuckGoSearchTool()
weather_info_tool = WeatherInfoTool()
hub_stats_tool = HubStatsTool()
latest_news_tool = LatestNewsTool()

# --- Alfred, the gala agent ---------------------------------------------------
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool, latest_news_tool],
    model=model,
    add_base_tools=True,  # also expose smolagents' built-in base tools
    planning_interval=3,  # re-plan every 3 agent steps
)
# Gradio input-output
def greet(name):
    """Forward the user's free-text question to Alfred and return his reply."""
    answer = alfred.run(name)
    return answer
# --- Gradio UI ----------------------------------------------------------------
# A single text-in / text-out interface wired to greet(); launched at import
# time, as is conventional for a Spaces app entry point.
question_box = gr.Textbox(label="Ask Alfred something...")
answer_box = gr.Textbox(label="Alfred's Response")

demo = gr.Interface(
    fn=greet,
    inputs=question_box,
    outputs=answer_box,
    title="🎩 Alfred the Gala Assistant",
    description="Ask about guests, weather, hub stats, and more!",
)
demo.launch()
# ==== Archived scratch code from earlier iterations (commented out; kept for reference) ====
# import os
# import random
# from smolagents import CodeAgent, InferenceClientModel
# # # model = InferenceClientModel(token=os.environ["HUGGINGFACE_API_KEY"])
# # model = InferenceClientModel(
# # model="HuggingFaceH4/zephyr-7b-beta", # A great free chat model
# # token=os.environ["HUGGINGFACE_API_KEY"]
# # )
# model = InferenceClientModel(
# model="HuggingFaceH4/zephyr-7b-beta",
# token=os.environ.get("HUGGINGFACE_API_KEY")
# )
# print("✅ Model initialized:", model) # DEBUG LINES
# # Import our custom tools from their modules
# from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool, GuestInfoRetrieverTool, LatestNewsTool
# import gradio as gr
# # Step 3: Integrate the Tool with Alfred
# # Finally, let’s bring everything together by creating our agent and equipping it with our custom tool:
# # # Initialize the Hugging Face model
# # model = InferenceClientModel()
# # # Create Alfred, our gala agent, with the guest info tool
# # alfred = CodeAgent(tools=[guest_info_tool, search_tool, weather_info_tool, hub_stats_tool, latest_news_tool], model=model)
# # # Example query Alfred might receive during the gala
# # response = alfred.run("What's the latest news about quantum computing?.")
# # print("🎩 Alfred's Response:")
# # print(response)
# # Initialize the Hugging Face model
# # model = InferenceClientModel()
# # # Initialize the web search tool
# search_tool = DuckDuckGoSearchTool()
# # # Initialize the weather tool
# weather_info_tool = WeatherInfoTool()
# # # Initialize the Hub stats tool
# hub_stats_tool = HubStatsTool()
# from datasets import load_dataset
# from langchain_core.documents import Document
# # Load the dataset from Hugging Face
# dataset = load_dataset("agents-course/unit3-invitees")["train"]
# # Create Document objects from the "info" column
# docs = [Document(page_content=row["description"]) for row in dataset]
# # Initialize the tool
# guest_info_tool = GuestInfoRetrieverTool(docs)
# # guest_info_tool = GuestInfoRetrieverTool(docs)
# # # Load the guest dataset and initialize the guest info tool
# latest_news_tool = LatestNewsTool()
# # Create Alfred with all the tools
# alfred = CodeAgent(
# tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool, latest_news_tool],
# model=model,
# add_base_tools=True, # Add any additional base tools
# planning_interval=3 # Enable planning every 3 steps
# )
# query = "I need to speak with Dr. Nikola Tesla about recent advancements in wireless energy. Can you help me prepare for this conversation?"
# response = alfred.run(query)
# # print("🎩 Alfred's Response:")
# # print(response)
# # def greet(name):
# # return response
# # # return "Hello " + name + "!!"
# # demo = gr.Interface(fn=greet, inputs="text", outputs="text")
# # demo.launch()
# # DEBUG LINES
# query = "I need to speak with Dr. Nikola Tesla about recent advancements in wireless energy. Can you help me prepare for this conversation?"
# response = alfred.run(query)
# print(response)
|