Sofpast commited on
Commit
e44cbb4
·
1 Parent(s): 246d505
__pycache__/retriever.cpython-310.pyc ADDED
Binary file (2.22 kB). View file
 
__pycache__/tools.cpython-310.pyc ADDED
Binary file (2.15 kB). View file
 
app.py CHANGED
@@ -1,27 +1,42 @@
1
- import gradio as gr
2
  import random
3
- from smolagents import GradioUI, CodeAgent, HfApiModel
4
 
 
5
  from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
6
  from retriever import load_guest_dataset
 
 
 
7
 
8
- model = HfApiModel()
 
9
 
 
 
 
 
10
  search_tool = DuckDuckGoSearchTool()
11
 
 
12
  weather_info_tool = WeatherInfoTool()
13
 
 
14
  hub_stats_tool = HubStatsTool()
15
 
 
16
  guest_info_tool = load_guest_dataset()
17
 
18
  # Create Alfred with all the tools
19
  alfred = CodeAgent(
20
- tools = [guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
21
- model = model,
22
- add_base_tools = True,
23
- planning_interval = 3
24
  )
25
 
26
- if __name__="__name__":
27
- GradioUI(alfred).launch()
 
 
 
 
1
# Import necessary libraries
import os
import random

from dotenv import load_dotenv
from smolagents import CodeAgent, InferenceClientModel

# Import our custom tools from their modules
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
from retriever import load_guest_dataset

# Read the local .env file so the HF key is visible to os.getenv below.
load_dotenv()

# Load the Hugging Face API key from environment variables
api_key = os.getenv("HUGGINGFACE_API_KEY")

# Initialize the Hugging Face model used by the agent
model = InferenceClientModel(token=api_key)

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()

# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
    model=model,
    add_base_tools=True,  # Add any additional base tools
    planning_interval=3,  # Enable planning every 3 steps
)


if __name__ == "__main__":
    # Run the demo query only when executed as a script, not on import
    # (the previous revision ran it unconditionally at import time).
    query = (
        "I need to speak with Dr. Nikola Tesla about recent advancements in "
        "wireless energy. Can you help me prepare for this conversation?"
    )
    response = alfred.run(query)

    print("🎩 Alfred's Response:")
    print(response)
retriever.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import datasets
2
+ from langchain.docstore.document import Document
3
+ from smolagents import Tool
4
+ from langchain_community.retrievers import BM25Retriever
5
+ from smolagents import CodeAgent, InferenceClientModel
6
+ import os
7
+ # from huggingface_hub import HfApi, InferenceClient
8
+ from dotenv import load_dotenv
9
+ # import os
10
+
11
+ load_dotenv()
12
+ # Load the Hugging Face API key from environment variables
13
+ api_key = os.getenv("HUGGINGFACE_API_KEY")
14
+
15
+
16
class GuestInfoRetrieverTool(Tool):
    """Tool that looks up gala guests by name or relation using BM25 search."""

    name = "guest_info_retriever"
    description = "Retrieves detailed information about gala guests based on their name or relation."
    inputs = {
        "query": {
            "type": "string",
            "description": "The name or relation of the guest you want information about."
        }
    }
    output_type = "string"

    def __init__(self, docs):
        # Let the smolagents Tool base class run its own setup/validation;
        # the original skipped this and only set is_initialized by hand.
        super().__init__()
        self.is_initialized = False
        # Build an in-memory BM25 index over the guest documents.
        self.retriever = BM25Retriever.from_documents(docs)

    def forward(self, query: str):
        """Return up to three matching guest entries, or a fallback message.

        Args:
            query: Guest name or relation to search for.
        """
        # `invoke` is the supported Retriever entry point; the original
        # called the deprecated `get_relevant_documents`.
        results = self.retriever.invoke(query)
        if results:
            return "\n\n".join(doc.page_content for doc in results[:3])
        return "No matching guest information found."
37
+
38
def load_guest_dataset():
    """Build and return the guest-info retriever tool from the invitees dataset."""
    # Fetch the invitee records from the Hugging Face Hub.
    dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

    def to_document(entry):
        # One searchable Document per guest, keyed by guest name in metadata.
        text = "\n".join([
            f"Name: {entry['name']}",
            f"Relation: {entry['relation']}",
            f"Description: {entry['description']}",
            f"Email: {entry['email']}"
        ])
        return Document(page_content=text, metadata={"name": entry["name"]})

    guest_docs = [to_document(guest) for guest in dataset]

    # Hand the documents to the retriever tool and return it.
    return GuestInfoRetrieverTool(guest_docs)
58
+
59
# Initialize the Hugging Face model.
# NOTE(review): this module-level model is not used inside retriever.py
# (app.py builds its own); kept so any importer reading `retriever.model`
# keeps working. Dead commented-out demo code that followed was removed.
model = InferenceClientModel(token=api_key)
tools.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from smolagents import DuckDuckGoSearchTool
2
+ from smolagents import Tool
3
+ import random
4
+ # from smolagents import Tool
5
+ from huggingface_hub import list_models
6
+ from dotenv import load_dotenv
7
+ import os
8
# Imports belong above runtime statements; this one previously sat after
# load_dotenv() mid-file.
from smolagents import CodeAgent, InferenceClientModel

# Read the local .env file so the HF key is visible to os.getenv below.
load_dotenv()

# Load the Hugging Face API key from environment variables
api_key = os.getenv("HUGGINGFACE_API_KEY")

# Initialize the DuckDuckGo search tool
search_tool = DuckDuckGoSearchTool()
20
+
21
class WeatherInfoTool(Tool):
    """Tool that returns made-up weather data for demo purposes."""

    name = "weather_info"
    description = "Fetches dummy weather information for a given location."
    inputs = {
        "location": {
            "type": "string",
            "description": "The location to get weather information for."
        }
    }
    output_type = "string"

    def forward(self, location: str):
        """Pick a random fake forecast and format it for the given location."""
        # Fixed pool of fake conditions; one is drawn at random per call.
        fake_forecasts = [
            {"condition": "Rainy", "temp_c": 15},
            {"condition": "Clear", "temp_c": 25},
            {"condition": "Windy", "temp_c": 20}
        ]
        picked = random.choice(fake_forecasts)
        condition, temp = picked["condition"], picked["temp_c"]
        return f"Weather in {location}: {condition}, {temp}°C"


# Instantiate the tool once at import time, matching the other tools.
weather_info_tool = WeatherInfoTool()
45
+
46
class HubStatsTool(Tool):
    """Tool that reports the most-downloaded Hub model for a given author."""

    name = "hub_stats"
    description = "Fetches the most downloaded model from a specific author on the Hugging Face Hub."
    inputs = {
        "author": {
            "type": "string",
            "description": "The username of the model author/organization to find models from."
        }
    }
    output_type = "string"

    def forward(self, author: str):
        """Look up the author's top model by downloads; any failure becomes a message."""
        try:
            # Ask the Hub for this author's single most-downloaded model.
            matches = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
            if not matches:
                return f"No models found for author {author}."
            top = matches[0]
            return f"The most downloaded model by {author} is {top.id} with {top.downloads:,} downloads."
        except Exception as e:
            return f"Error fetching models for {author}: {str(e)}"


# Instantiate the tool once at import time, matching the other tools.
hub_stats_tool = HubStatsTool()
72
+
73
# Initialize the Hugging Face model.
# NOTE(review): this model/agent pair runs at import time of tools.py and
# duplicates the agent that app.py builds; kept so existing importers of
# `tools.model` / `tools.alfred` keep working. Dead commented-out example
# code that surrounded it was removed.
model = InferenceClientModel(token=api_key)

# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[search_tool, weather_info_tool, hub_stats_tool],
    model=model
)