BishalRD committed on
Commit
20234ab
·
1 Parent(s): bc5fb4a

Add weather tool and web search tool

Browse files
Files changed (1) hide show
  1. level 3 conversation bot.py +107 -0
level 3 conversation bot.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from langchain_ollama import ChatOllama
3
+ from langchain_core.tools import tool
4
+ from langgraph.prebuilt import ToolNode, tools_condition
5
+ from langgraph.graph import StateGraph, START, END
6
+ from langgraph.graph.message import MessagesState
7
+ from langgraph.checkpoint.memory import MemorySaver
8
+ from langchain_core.messages import (
9
+ convert_to_openai_messages,
10
+ SystemMessage,
11
+ HumanMessage,
12
+ )
13
+ from langchain_community.tools.tavily_search import TavilySearchResults
14
+ from langchain_community.utilities import OpenWeatherMapAPIWrapper
15
+
16
+ from dotenv import load_dotenv
17
+ load_dotenv()
18
+
19
@tool
def web_search_tool(query: str) -> str:
    """Search the web for information.

    Args:
        query: The search query string.

    Returns:
        A newline-separated summary of up to 3 search results.
    """
    tavily_search = TavilySearchResults(max_results=3)
    search_docs = tavily_search.invoke(query)
    # Bug fix: TavilySearchResults.invoke returns a list of result dicts,
    # not a string; flatten it so the output matches the declared -> str
    # annotation and reads cleanly when fed back to the LLM.
    if isinstance(search_docs, str):
        return search_docs
    return "\n\n".join(
        f"{doc.get('url', '')}: {doc.get('content', '')}"
        if isinstance(doc, dict)
        else str(doc)
        for doc in search_docs
    )
25
+
26
@tool
def weather_tool(city: str) -> str:
    """Get the weather in a given city.
    Args:
    city: The city to get the weather for.
    Example: "London,GB", "New York,US"
    """
    # Delegate to the OpenWeatherMap wrapper (reads its API key from the
    # environment loaded via dotenv at import time).
    api_client = OpenWeatherMapAPIWrapper()
    return api_client.run(city)
37
+
38
def create_conversation_graph():
    """
    Create a conversational graph with a memory saver.

    Builds a two-node LangGraph (assistant + tools) where the assistant can
    call the weather and web-search tools. Conversation state is checkpointed
    with an in-memory saver so multi-turn context is preserved per thread.

    Returns:
        A compiled LangGraph ready to be invoked with a MessagesState.
    """
    memory = MemorySaver()
    tools = [weather_tool, web_search_tool]

    llm = ChatOllama(model="qwen2.5:3b", temperature=0.5)
    llm_with_tools = llm.bind_tools(tools)

    # Bug fix: the previous system prompt ("...performing arithmetic on a
    # set of inputs") was copy-pasted from an arithmetic demo and did not
    # match this bot's actual tools (weather lookup and web search).
    sys_msg = SystemMessage(
        content=(
            "You are a helpful assistant. Answer the user's questions, and "
            "use the available tools to look up the weather or search the "
            "web when needed."
        )
    )

    def assistant(state: MessagesState) -> MessagesState:
        # Prepend the system message on every turn; prior turns are carried
        # in state["messages"] by the checkpointer.
        return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)

    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    builder.add_edge(START, "assistant")
    # tools_condition routes to "tools" when the last AI message contains
    # tool calls, otherwise to END.
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    graph = builder.compile(checkpointer=memory)
    return graph
64
+
65
+
66
def create_chat_interface():
    """
    Create and configure the chat interface with the conversation graph.

    Returns:
        A gradio ChatInterface wired to the LangGraph conversation graph.
    """
    import uuid

    graph = create_conversation_graph()

    # Bug fix: the thread id was hard-coded to "123", so every launch (and
    # every visitor) resumed the same checkpointed conversation. A fresh id
    # per interface instance starts with clean memory.
    # NOTE(review): all users of one running instance still share this
    # thread — true per-session ids would need gradio session state.
    config = {"configurable": {"thread_id": str(uuid.uuid4())}}

    def chat_with_assistant(message, history):
        """
        Chat with the assistant using the conversational graph.
        """
        # Only the new human message is passed in; the graph's checkpointer
        # already holds the earlier turns for this thread.
        messages_state = MessagesState(messages=[HumanMessage(content=message)])

        # Invoke the graph with the properly formatted input
        response = graph.invoke(messages_state, config)

        # Debug: dump the full message trace to stdout.
        for msg in response["messages"]:
            msg.pretty_print()

        # The final message in the state is the assistant's reply.
        ai_message = response["messages"][-1]

        # Bug fix: return the plain content string. The previous
        # convert_to_openai_messages(ai_message) returned an OpenAI-format
        # dict, which is not what a gradio ChatInterface callback with
        # type="messages" expects as its reply value.
        return ai_message.content

    demo = gr.ChatInterface(
        fn=chat_with_assistant,
        type="messages",
        title="Conversational Bot",
        description="Ask anything you want",
        examples=["Hello", "What is your name?", "What is the weather in Tokyo?"],
    )
    return demo
104
+
105
+ if __name__ == "__main__":
106
+ demo = create_chat_interface()
107
+ demo.launch()