Files changed (2) hide show
  1. app1.py +68 -0
  2. tool.py +54 -0
app1.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import TypedDict, Annotated
2
+ from langgraph.graph.message import add_messages
3
+ from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
4
+ from langgraph.prebuilt import ToolNode
5
+ from langgraph.graph import START, StateGraph
6
+ from langgraph.prebuilt import tools_condition
7
+ from langgraph.checkpoint.memory import InMemorySaver
8
+ from langchain_ollama import ChatOllama
9
+ from tool import DuckDuckGoSearchRun,web_search_tool,latest_news_tool, get_weather_tool as weather_info_tool, hub_stats_tool
10
+ from retriever import guest_info_tool_1
11
+
12
+
13
+
14
+
15
# Local Ollama chat model used as the agent's LLM backend.
model = ChatOllama(
    model="qwen2.5:1.5b",  # Or try other Ollama-supported models
    base_url="http://127.0.0.1:11434",  # Default Ollama local server
    num_predict=256,  # cap generated tokens per response
)

# BUG FIX: the original list registered weather_info_tool twice and never
# included latest_news_tool, even though it is imported above — the agent
# could not answer news queries. Deduplicated and added the news tool.
tools = [
    weather_info_tool,
    web_search_tool,
    latest_news_tool,
    hub_stats_tool,
    guest_info_tool_1,
]

# Expose the tool schemas to the model so it can emit tool calls.
model_with_tool = model.bind_tools(tools)
23
+
24
class AgentState(TypedDict):
    """Graph state: the running conversation shared between nodes."""

    # add_messages is a reducer — new messages are appended/merged into the
    # existing list on each graph step rather than replacing it.
    messages: Annotated[list[AnyMessage],add_messages]
26
+
27
+
28
def assistant(state: AgentState):
    """LLM node: run the tool-bound model over the conversation so far."""
    # The reply may contain tool calls; tools_condition routes on that later.
    reply = model_with_tool.invoke(state["messages"])
    return {"messages": [reply]}
32
+
33
# Wire the agent graph: an assistant <-> tools loop, entered at the assistant.
builder = StateGraph(AgentState)

tool_node = ToolNode(tools)
builder.add_node("assistant", assistant)
builder.add_node("tools", tool_node)

builder.add_edge(START, "assistant")
# Route to "tools" when the model emitted tool calls, otherwise end the turn.
builder.add_conditional_edges("assistant", tools_condition)
builder.add_edge("tools", "assistant")
44
+
45
# In-memory checkpointer so the thread keeps its conversation history.
checkpointer = InMemorySaver()

alfred = builder.compile(checkpointer=checkpointer)
thread_config = {"configurable": {"thread_id": "1"}}

print("🎩 Alfred: Hello, I am Alfred. How can I assist you today?")
print("Type 'exit' or 'quit' to stop.\n")

# Simple REPL: read a line, run one graph turn, print the final reply.
while (user_input := input("You: ")).lower() not in ("exit", "quit"):
    result = alfred.invoke(
        {"messages": [HumanMessage(content=user_input)]},
        thread_config,
    )
    print("\n🎩 Alfred:", result["messages"][-1].content)
    print("-" * 40)

print("Alfred: Goodbye.")
tool.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_core.tools import tool
2
+ from langchain_community.tools import DuckDuckGoSearchRun
3
+ import random
4
+ from huggingface_hub import list_models
5
+
6
# Shared DuckDuckGo search client, reused by the search-backed tools below.
search = DuckDuckGoSearchRun()
7
+
8
@tool
def web_search_tool(query: str) -> str:
    """Search the web for information about unfamiliar guests."""
    # Delegate straight to the module-level DuckDuckGo client.
    results = search.run(query)
    return results
12
+
13
@tool
def latest_news_tool(topic: str) -> str:
    """
    Get the latest news about a specific topic.
    """
    # Build the news query and run it through the shared client in one step.
    return search.run(f"latest news about {topic}")
24
+
25
+
26
@tool
def get_weather_tool(location: str) -> str:
    """Fetches weather information for a given location."""
    # Dummy data — a real implementation would query a weather API here.
    weather_condition = [
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
        {"condition": "Windy", "temp_c": 20},
    ]
    data = random.choice(weather_condition)
    # BUG FIX: user-visible typo "Wrather" corrected to "Weather".
    return f"Weather in {location} is {data['condition']} with temp : {data['temp_c']}c"
37
+
38
+
39
@tool
def hub_stats_tool(author: str) -> str:
    """Fetches the most downloaded model from a specific author on the Hugging Face Hub."""
    try:
        # Hub query: the author's models sorted by downloads, top hit only.
        top_models = list(
            list_models(author=author, sort="downloads", direction=-1, limit=1)
        )
        if not top_models:
            return f"No models found for author {author}."
        best = top_models[0]
        return f"The most downloaded model by {author} is {best.id} with {best.downloads:,} downloads."
    except Exception as e:
        # Surface any Hub/network failure as a readable tool result.
        return f"Error fetching models for {author}: {str(e)}"
53
+
54
+