BishalRD committed on
Commit
c6f83f2
·
1 Parent(s): 6369fd4

Chat bot with tools

Browse files
Files changed (1) hide show
  1. level 2 conversation bot.py +132 -0
level 2 conversation bot.py ADDED
@@ -0,0 +1,132 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from langchain_ollama import ChatOllama
3
+ from langchain_core.tools import tool
4
+ from langgraph.prebuilt import ToolNode, tools_condition
5
+ from langgraph.graph import StateGraph, START, END
6
+ from langgraph.graph.message import MessagesState
7
+ from langgraph.checkpoint.memory import MemorySaver
8
+ from langchain_core.messages import (
9
+ convert_to_openai_messages,
10
+ SystemMessage,
11
+ HumanMessage,
12
+ )
13
+
14
+ @tool
15
+ def add(a: int, b: int) -> int:
16
+ """Add a and b.
17
+
18
+ Args:
19
+ a: first int
20
+ b: second int
21
+ """
22
+ return a + b
23
+
24
+ @tool
25
+ def multiply(a: int, b: int) -> int:
26
+ """Multiply a and b.
27
+
28
+ Args:
29
+ a: first int
30
+ b: second int
31
+ """
32
+ return a * b
33
+
34
+ @tool
35
+ def divide(a: int, b: int) -> int:
36
+ """Divide a by b.
37
+
38
+ Args:
39
+ a: first int
40
+ b: second int
41
+ """
42
+ return a / b
43
+
44
+ @tool
45
+ def subtract(a: int, b: int) -> int:
46
+ """Subtract b from a.
47
+
48
+ Args:
49
+ a: first int
50
+ b: second int
51
+ """
52
+ return a - b
53
+
54
+ @tool
55
+ def square(a: int) -> int:
56
+ """Square a.
57
+
58
+ Args:
59
+ a: first int
60
+ """
61
+ return a * a
62
+
63
+ def create_conversation_graph():
64
+ """
65
+ Create a conversational graph with a memory saver.
66
+ """
67
+ memory = MemorySaver()
68
+ tools = [add, multiply, divide, subtract, square]
69
+
70
+ llm = ChatOllama(model="qwen2.5:3b", temperature=0.5)
71
+ llm_with_tools = llm.bind_tools(tools)
72
+
73
+ sys_msg = SystemMessage(content="You are a helpful assistant tasked with performing arithmetic on a set of inputs.")
74
+
75
+ def assistant(state: MessagesState) -> MessagesState:
76
+ return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}
77
+
78
+ builder = StateGraph(MessagesState)
79
+
80
+ builder.add_node("assistant", assistant)
81
+ builder.add_node("tools", ToolNode(tools))
82
+
83
+ builder.add_edge(START, "assistant")
84
+ builder.add_conditional_edges("assistant", tools_condition)
85
+ builder.add_edge("tools", "assistant")
86
+
87
+ graph = builder.compile(checkpointer=memory)
88
+ return graph
89
+
90
+
91
+ def create_chat_interface():
92
+ """
93
+ Create and configure the chat interface with the conversation graph.
94
+ """
95
+ graph = create_conversation_graph()
96
+
97
+ # Specify a thread id
98
+ thread_id = "123"
99
+ config = {"configurable": {"thread_id": thread_id}}
100
+
101
+ def chat_with_assistant(message, history):
102
+ """
103
+ Chat with the assistant using the conversational graph.
104
+ """
105
+ # Create a MessagesState with a HumanMessage
106
+ messages_state = MessagesState(messages=[HumanMessage(content=message)])
107
+
108
+ # Invoke the graph with the properly formatted input
109
+ response = graph.invoke(messages_state, config)
110
+
111
+ for msg in response["messages"]:
112
+ msg.pretty_print()
113
+
114
+ # Extract the last message from the response's messages list
115
+ ai_message = response["messages"][-1]
116
+
117
+ # Return just the content of the AI message
118
+ return convert_to_openai_messages(ai_message)
119
+
120
+
121
+ demo = gr.ChatInterface(
122
+ fn=chat_with_assistant,
123
+ type="messages",
124
+ title="Conversational Bot",
125
+ description="Ask anything you want",
126
+ examples=["Hello", "What is your name?", "What is the weather in Tokyo?"],
127
+ )
128
+ return demo
129
+
130
+ if __name__ == "__main__":
131
+ demo = create_chat_interface()
132
+ demo.launch()