Spaces:
Runtime error
Runtime error
ernani committed on
Commit ·
53e52ec
1
Parent(s): e7c51fb
updating tools to use langgraph
Browse files
app.py
CHANGED
|
@@ -9,7 +9,7 @@ from langgraph.graph import START, StateGraph
|
|
| 9 |
from langgraph.prebuilt import tools_condition
|
| 10 |
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
|
| 11 |
|
| 12 |
-
from tools import
|
| 13 |
from retriever import load_guest_dataset
|
| 14 |
|
| 15 |
# Initialize the Hugging Face model
|
|
@@ -22,24 +22,11 @@ llm = HuggingFaceEndpoint(
|
|
| 22 |
|
| 23 |
chat = ChatHuggingFace(llm=llm, verbose=True)
|
| 24 |
|
| 25 |
-
#
|
| 26 |
-
memory_management_tool = MemoryManagementTool()
|
| 27 |
-
|
| 28 |
-
# Initialize the web search tool
|
| 29 |
-
search_tool = DuckDuckGoSearchTool()
|
| 30 |
-
|
| 31 |
-
# Initialize the weather tool
|
| 32 |
-
weather_info_tool = WeatherInfoTool()
|
| 33 |
-
|
| 34 |
-
# Initialize the Hub stats tool
|
| 35 |
-
hub_stats_tool = HubStatsTool()
|
| 36 |
|
| 37 |
-
# Load the guest dataset and initialize the guest info tool
|
| 38 |
guest_info_tool = load_guest_dataset()
|
| 39 |
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
tools = [guest_info_tool, weather_info_tool, hub_stats_tool, search_tool, memory_management_tool]
|
| 43 |
chat_with_tools = chat.bind_tools(tools)
|
| 44 |
|
| 45 |
# Generate the AgentState and Agent graph
|
|
|
|
| 9 |
from langgraph.prebuilt import tools_condition
|
| 10 |
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
|
| 11 |
|
| 12 |
+
from tools import weather_info_tool, hub_stats_tool, memory_management_tool, duckduckgo_search_tool
|
| 13 |
from retriever import load_guest_dataset
|
| 14 |
|
| 15 |
# Initialize the Hugging Face model
|
|
|
|
| 22 |
|
| 23 |
# Wrap the HF endpoint in the chat interface used for tool calling
# (verbose mode kept on for debugging).
chat = ChatHuggingFace(llm=llm, verbose=True)

# adding tools
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
|
|
|
|
| 27 |
# Build the guest-info retriever, collect every tool, and bind the whole
# set to the chat model so the graph can emit tool calls.
guest_info_tool = load_guest_dataset()

tools = [
    guest_info_tool,
    weather_info_tool,
    hub_stats_tool,
    duckduckgo_search_tool,
    memory_management_tool,
]
chat_with_tools = chat.bind_tools(tools)
|
| 31 |
|
| 32 |
# Generate the AgentState and Agent graph
|
tools.py
CHANGED
|
@@ -1,78 +1,76 @@
|
|
| 1 |
-
from
|
| 2 |
-
from
|
|
|
|
| 3 |
import random
|
| 4 |
from huggingface_hub import list_models
|
| 5 |
-
from langgraph.store.memory import InMemoryStore
|
| 6 |
|
| 7 |
# Initialize the DuckDuckGo search tool
|
| 8 |
search_tool = DuckDuckGoSearchTool()
|
| 9 |
-
|
| 10 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 11 |
)
|
| 12 |
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
}
|
| 22 |
-
output_type = "string"
|
| 23 |
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
{"condition": "Windy", "temp_c": 20}
|
| 30 |
-
]
|
| 31 |
-
# Randomly select a weather condition
|
| 32 |
-
data = random.choice(weather_conditions)
|
| 33 |
-
return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"
|
| 34 |
|
| 35 |
-
class HubStatsTool(Tool):
|
| 36 |
-
name = "hub_stats"
|
| 37 |
-
description = "Fetches the most downloaded model from a specific author on the Hugging Face Hub."
|
| 38 |
-
inputs = {
|
| 39 |
-
"author": {
|
| 40 |
-
"type": "string",
|
| 41 |
-
"description": "The username of the model author/organization to find models from."
|
| 42 |
-
}
|
| 43 |
-
}
|
| 44 |
-
output_type = "string"
|
| 45 |
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
models = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
|
| 50 |
-
|
| 51 |
-
if models:
|
| 52 |
-
model = models[0]
|
| 53 |
-
return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
|
| 54 |
-
else:
|
| 55 |
-
return f"No models found for author {author}."
|
| 56 |
-
except Exception as e:
|
| 57 |
-
return f"Error fetching models for {author}: {str(e)}"
|
| 58 |
|
| 59 |
-
|
| 60 |
-
name
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
"type": "string",
|
| 65 |
-
"description": "The query to search in memory."
|
| 66 |
-
}
|
| 67 |
-
}
|
| 68 |
-
output_type = "string"
|
| 69 |
|
| 70 |
-
def forward(self, query: str):
|
| 71 |
-
# Retrieve relevant memory entries
|
| 72 |
-
results = store.retrieve(query)
|
| 73 |
-
if results:
|
| 74 |
-
return "\n\n".join(results)
|
| 75 |
-
else:
|
| 76 |
-
return "No relevant memory found."
|
| 77 |
|
| 78 |
|
|
|
|
| 1 |
+
# Third-party imports for the agent tools.
# BUG FIX: `langchain_community` ships no `DuckDuckGoSearchTool` class and no
# `langchain_community.tools.duckduckgo_search` module — the DuckDuckGo tool
# is `DuckDuckGoSearchRun`, re-exported from `langchain_community.tools`.
# The old import failed at module load time.
import random

from huggingface_hub import list_models
from langchain.tools import Tool
from langchain_community.tools import DuckDuckGoSearchRun
from langgraph.store.memory import InMemoryStore

# Initialize the DuckDuckGo search tool
search_tool = DuckDuckGoSearchRun()
|
| 9 |
+
|
| 10 |
+
# Initialize the memory store used for conversation-memory lookups.
# FIX: langgraph's semantic index config expects the embedding dimensionality
# alongside the embed spec; text-embedding-3-small produces 1536-dim vectors.
store = InMemoryStore(index={"embed": "openai:text-embedding-3-small", "dims": 1536})
|
| 12 |
+
|
| 13 |
+
# Define the weather information tool
|
| 14 |
+
def get_weather_info(location: str) -> str:
    """Fetches dummy weather information for a given location."""
    # Fixed set of fake observations; one is picked at random per call.
    options = [
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
        {"condition": "Windy", "temp_c": 20},
    ]
    picked = random.choice(options)
    return f"Weather in {location}: {picked['condition']}, {picked['temp_c']}°C"
|
| 23 |
+
|
| 24 |
+
# Expose the weather helper to the agent as a langchain Tool.
weather_info_tool = Tool(
    func=get_weather_info,
    name="get_weather_info",
    description="Fetches dummy weather information for a given location.",
)
|
| 29 |
+
|
| 30 |
+
# Define the Hub stats tool
|
| 31 |
+
def get_hub_stats(author: str) -> str:
    """Fetches the most downloaded model from a specific author on the Hugging Face Hub."""
    try:
        top = list(list_models(author=author, sort="downloads", direction=-1, limit=1))
        if not top:
            return f"No models found for author {author}."
        model = top[0]
        return f"The most downloaded model by {author} is {model.id} with {model.downloads:,} downloads."
    except Exception as e:
        # Deliberate best-effort: surface the failure as a string the agent can read.
        return f"Error fetching models for {author}: {str(e)}"
|
| 42 |
+
|
| 43 |
+
# Expose the Hub-stats helper to the agent as a langchain Tool.
hub_stats_tool = Tool(
    func=get_hub_stats,
    name="get_hub_stats",
    description="Fetches the most downloaded model from a specific author on the Hugging Face Hub.",
)
|
| 48 |
|
| 49 |
+
# Define the memory management tool
|
| 50 |
+
def memory_management(query: str) -> str:
    """Manages and queries conversation memory.

    Returns the matching memory entries joined by blank lines, or a
    fallback message when nothing relevant is found.
    """
    # BUG FIX: langgraph's BaseStore/InMemoryStore has no `retrieve` method —
    # semantic lookup is done with `search(namespace_prefix, query=...)`.
    # NOTE(review): the ("memories",) namespace is assumed — confirm against
    # whatever code writes entries into `store`.
    results = store.search(("memories",), query=query)
    if results:
        # Each hit is an Item; its payload dict lives in `.value`.
        return "\n\n".join(str(item.value) for item in results)
    return "No relevant memory found."
|
|
|
|
|
|
|
| 57 |
|
| 58 |
+
# Expose the memory lookup to the agent as a langchain Tool.
memory_management_tool = Tool(
    func=memory_management,
    name="memory_management",
    description="Manages and queries conversation memory.",
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 63 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 64 |
|
| 65 |
+
def duckduckgo_search(query: str) -> str:
    """Searches the web for information."""
    # Delegate to the module-level DuckDuckGo tool instance.
    result = search_tool.run(query)
    return result
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 68 |
|
| 69 |
+
# Expose web search to the agent as a langchain Tool.
duckduckgo_search_tool = Tool(
    func=duckduckgo_search,
    name="duckduckgo_search",
    description="Searches the web for information.",
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 74 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 75 |
|
| 76 |
|