| import gradio as gr |
| from langchain_ollama import ChatOllama |
| from langchain_core.tools import tool |
| from langgraph.prebuilt import ToolNode, tools_condition |
| from langgraph.graph import StateGraph, START, END |
| from langgraph.graph.message import MessagesState |
| from langgraph.checkpoint.memory import MemorySaver |
| from langchain_core.messages import ( |
| convert_to_openai_messages, |
| SystemMessage, |
| HumanMessage, |
| ) |
|
|
@tool
def add(a: int, b: int) -> int:
    """Add a and b.

    Args:
        a: first int
        b: second int
    """
    result = a + b
    return result
|
|
@tool
def multiply(a: int, b: int) -> int:
    """Multiply a and b.

    Args:
        a: first int
        b: second int
    """
    product = a * b
    return product
|
|
@tool
def divide(a: int, b: int) -> float:
    """Divide a by b.

    True division (``/``) always yields a float, so the return annotation
    is ``float`` — the previous ``int`` annotation was wrong and would
    mislead the tool-calling schema derived from this signature.

    Args:
        a: first int (dividend)
        b: second int (divisor, must be non-zero)

    Raises:
        ZeroDivisionError: if b is 0.
    """
    return a / b
|
|
@tool
def subtract(a: int, b: int) -> int:
    """Subtract b from a.

    Args:
        a: first int
        b: second int
    """
    difference = a - b
    return difference
|
|
@tool
def square(a: int) -> int:
    """Square a.

    Args:
        a: first int
    """
    squared = a * a
    return squared
|
|
def create_conversation_graph(model: str = "qwen2.5:3b", temperature: float = 0.5):
    """Build a tool-calling conversational graph with in-memory checkpointing.

    Args:
        model: Ollama model name to chat with. The default preserves the
            previously hard-coded ``"qwen2.5:3b"``.
        temperature: sampling temperature passed to ``ChatOllama``. Default
            preserves the previously hard-coded ``0.5``.

    Returns:
        A compiled LangGraph graph over ``MessagesState`` whose per-thread
        history is persisted by a ``MemorySaver`` checkpointer.
    """
    memory = MemorySaver()
    tools = [add, multiply, divide, subtract, square]

    llm = ChatOllama(model=model, temperature=temperature)
    llm_with_tools = llm.bind_tools(tools)

    sys_msg = SystemMessage(content="You are a helpful assistant tasked with performing arithmetic on a set of inputs.")

    def assistant(state: MessagesState) -> MessagesState:
        # Prepend the system prompt on every turn; the checkpointer stores
        # only the user/assistant/tool messages in state["messages"].
        return {"messages": [llm_with_tools.invoke([sys_msg] + state["messages"])]}

    builder = StateGraph(MessagesState)

    builder.add_node("assistant", assistant)
    builder.add_node("tools", ToolNode(tools))

    # assistant -> tools when the last AI message contains tool calls,
    # otherwise -> END (that routing is what tools_condition implements).
    builder.add_edge(START, "assistant")
    builder.add_conditional_edges("assistant", tools_condition)
    builder.add_edge("tools", "assistant")

    graph = builder.compile(checkpointer=memory)
    return graph
|
|
|
|
def create_chat_interface(thread_id: str = "123"):
    """Create and configure the Gradio chat interface over the graph.

    Args:
        thread_id: checkpoint thread id used to persist the conversation
            across turns. The default preserves the previously hard-coded
            ``"123"``.

    Returns:
        A ``gr.ChatInterface`` ready to ``.launch()``.
    """
    graph = create_conversation_graph()
    config = {"configurable": {"thread_id": thread_id}}

    def chat_with_assistant(message, history):
        """Run one user turn through the graph and return the reply.

        ``history`` is intentionally unused: the graph's MemorySaver
        already holds the full conversation for this thread.
        """
        response = graph.invoke(
            {"messages": [HumanMessage(content=message)]}, config
        )

        ai_message = response["messages"][-1]
        # Debug aid: print only the newest reply. The previous version
        # pretty-printed the ENTIRE accumulated history on every turn,
        # producing O(n^2) console output as the conversation grew.
        ai_message.pretty_print()

        # Gradio (type="messages") accepts an openai-style message dict.
        return convert_to_openai_messages(ai_message)

    demo = gr.ChatInterface(
        fn=chat_with_assistant,
        type="messages",
        title="Conversational Bot",
        description="Ask anything you want",
        examples=["Hello", "What is your name?", "What is the weather in Tokyo?"],
    )
    return demo
|
|
if __name__ == "__main__":
    # Build the UI and start the local Gradio server.
    create_chat_interface().launch()
|
|