Spaces:
Sleeping
Sleeping
Commit
·
b0540b3
1
Parent(s):
a75702e
basic chatbot example - langgraph
Browse files- README.md +13 -3
- app.py +51 -3
- basic_chatbot.png +0 -0
- chatbot_with_search_tool.png +0 -0
- configfile.ini +1 -1
- src/basic_bot/chatbot_node.py +15 -0
- src/basic_bot/chatbot_with_tool_node.py +38 -0
- src/graph/graph_builder.py +66 -0
- src/state/state.py +9 -0
- src/streamlitui/loadui.py +7 -0
- src/tools/search_tool.py +15 -0
README.md
CHANGED
|
@@ -13,11 +13,21 @@ short_description: Langgraph
|
|
| 13 |
|
| 14 |
# Langgraph
|
| 15 |
## UseCases
|
| 16 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
reference:
|
| 18 |

|
| 19 |
|
| 20 |
-
###
|
| 21 |
reference:
|
| 22 |

|
| 23 |
-

|
| 20 |
+
|
| 21 |
+
### 2. Chatbot with Tool
|
| 22 |
+
reference:
|
| 23 |
+

|
| 24 |
+
|
| 25 |
+
### 3. Appointment Receptionist
|
| 26 |
reference:
|
| 27 |

|
| 28 |
|
| 29 |
+
### 4. Customer Support
|
| 30 |
reference:
|
| 31 |

|
| 32 |
+

|
| 33 |
+
|
app.py
CHANGED
|
@@ -1,15 +1,18 @@
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
from configfile import Config
|
|
|
|
| 4 |
from src.streamlitui.loadui import LoadStreamlitUI
|
| 5 |
from src.LLMS.groqllm import GroqLLM
|
| 6 |
|
| 7 |
from src.langgraphagent.caller_agent import Caller_Agent
|
| 8 |
-
from langchain_core.messages import HumanMessage
|
| 9 |
from src.tools.langgraphtool import APPOINTMENTS
|
| 10 |
|
| 11 |
|
| 12 |
|
|
|
|
|
|
|
| 13 |
def submit_message(model):
|
| 14 |
obj_caller_agent = Caller_Agent(model)
|
| 15 |
# caller agent
|
|
@@ -33,7 +36,7 @@ if __name__ == "__main__":
|
|
| 33 |
|
| 34 |
if user_input['selected_usecase'] == "Appointment Receptionist":
|
| 35 |
if st.chat_input("Type message here", key="message") :
|
| 36 |
-
|
| 37 |
obj_llm_config = GroqLLM(user_controls_input=user_input)
|
| 38 |
model = obj_llm_config.get_llm_model()
|
| 39 |
CONVERSATION,APPOINTMENTS= (submit_message(model))
|
|
@@ -105,8 +108,53 @@ if __name__ == "__main__":
|
|
| 105 |
st.write(customers_database)
|
| 106 |
st.title('data protection checks')
|
| 107 |
st.write(data_protection_checks)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 108 |
|
| 109 |
-
|
| 110 |
|
| 111 |
|
| 112 |
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
|
| 3 |
from configfile import Config
|
| 4 |
+
from src.graph.graph_builder import GraphBuilder
|
| 5 |
from src.streamlitui.loadui import LoadStreamlitUI
|
| 6 |
from src.LLMS.groqllm import GroqLLM
|
| 7 |
|
| 8 |
from src.langgraphagent.caller_agent import Caller_Agent
|
| 9 |
+
from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
|
| 10 |
from src.tools.langgraphtool import APPOINTMENTS
|
| 11 |
|
| 12 |
|
| 13 |
|
| 14 |
+
|
| 15 |
+
|
| 16 |
def submit_message(model):
|
| 17 |
obj_caller_agent = Caller_Agent(model)
|
| 18 |
# caller agent
|
|
|
|
| 36 |
|
| 37 |
if user_input['selected_usecase'] == "Appointment Receptionist":
|
| 38 |
if st.chat_input("Type message here", key="message") :
|
| 39 |
+
# Configure LLM
|
| 40 |
obj_llm_config = GroqLLM(user_controls_input=user_input)
|
| 41 |
model = obj_llm_config.get_llm_model()
|
| 42 |
CONVERSATION,APPOINTMENTS= (submit_message(model))
|
|
|
|
| 108 |
st.write(customers_database)
|
| 109 |
st.title('data protection checks')
|
| 110 |
st.write(data_protection_checks)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
else:
|
| 114 |
+
# Basic Examples - chatbot and chatbot with tool
|
| 115 |
+
# Text input for user message
|
| 116 |
+
user_message = st.chat_input("Enter your message:")
|
| 117 |
+
if user_message:
|
| 118 |
+
# Configure LLM
|
| 119 |
+
obj_llm_config = GroqLLM(user_controls_input=user_input)
|
| 120 |
+
model = obj_llm_config.get_llm_model()
|
| 121 |
+
|
| 122 |
+
# Initialize and set up the graph based on use case
|
| 123 |
+
usecase = user_input['selected_usecase']
|
| 124 |
+
graph_builder = GraphBuilder(model)
|
| 125 |
+
graph = graph_builder.setup_graph(usecase)
|
| 126 |
+
|
| 127 |
+
# Prepare state and invoke the graph
|
| 128 |
+
initial_state = {"messages": [user_message]}
|
| 129 |
+
entry_points = {"Basic Chatbot": "chatbot", "Chatbot with Tool": "chatbot_with_tool"}
|
| 130 |
+
|
| 131 |
+
entry_points = {"Basic Chatbot": "chatbot", "Chatbot with Tool": "chatbot_with_tool"}
|
| 132 |
+
if usecase =="Basic Chatbot":
|
| 133 |
+
for event in graph.stream({'messages':("user",user_message)}):
|
| 134 |
+
print(event.values())
|
| 135 |
+
for value in event.values():
|
| 136 |
+
print(value['messages'])
|
| 137 |
+
with st.chat_message("user"):
|
| 138 |
+
st.write(user_message)
|
| 139 |
+
with st.chat_message("assistant"):
|
| 140 |
+
st.write(value["messages"].content)
|
| 141 |
+
|
| 142 |
+
else:
|
| 143 |
+
res = graph.invoke(initial_state)
|
| 144 |
+
|
| 145 |
+
for message in res['messages']:
|
| 146 |
+
if type(message) == HumanMessage:
|
| 147 |
+
with st.chat_message("user"):
|
| 148 |
+
st.write(message.content)
|
| 149 |
+
elif type(message)==ToolMessage:
|
| 150 |
+
with st.chat_message("ai"):
|
| 151 |
+
st.write("Tool Call Start")
|
| 152 |
+
st.write(message.content)
|
| 153 |
+
st.write("Tool Call End")
|
| 154 |
+
elif type(message)==AIMessage and message.content:
|
| 155 |
+
with st.chat_message("assistant"):
|
| 156 |
+
st.write(message.content)
|
| 157 |
|
|
|
|
| 158 |
|
| 159 |
|
| 160 |
|
basic_chatbot.png
ADDED
|
chatbot_with_search_tool.png
ADDED
|
configfile.ini
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
[DEFAULT]
|
| 2 |
PAGE_TITLE = Langgraph IN ACTION
|
| 3 |
LLM_OPTIONS = Groq
|
| 4 |
-
USECASE_OPTIONS = Appointment Receptionist, Customer Support
|
| 5 |
GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
|
| 6 |
|
|
|
|
| 1 |
[DEFAULT]
|
| 2 |
PAGE_TITLE = Langgraph IN ACTION
|
| 3 |
LLM_OPTIONS = Groq
|
| 4 |
+
USECASE_OPTIONS = Basic Chatbot, Chatbot with Tool, Appointment Receptionist, Customer Support
|
| 5 |
GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
|
| 6 |
|
src/basic_bot/chatbot_node.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from src.LLMS.groqllm import GroqLLM
|
| 2 |
+
from src.state.state import State
|
| 3 |
+
|
| 4 |
+
class ChatbotNode:
    """Graph node implementing the plain (tool-free) chatbot behaviour."""

    def __init__(self, model):
        # LLM instance used to answer the conversation.
        self.llm = model

    def process(self, state: "State") -> dict:
        """Invoke the LLM on the conversation in *state*.

        Returns the LLM reply under the ``messages`` key so the graph's
        state reducer can merge it into the running conversation.
        """
        reply = self.llm.invoke(state["messages"])
        return {"messages": reply}
|
src/basic_bot/chatbot_with_tool_node.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from src.LLMS.groqllm import GroqLLM
|
| 2 |
+
from src.state.state import State
|
| 3 |
+
|
| 4 |
+
class ChatbotWithToolNode:
    """Chatbot node variants that integrate external tools."""

    def __init__(self, model):
        # LLM used directly in process() and tool-bound in create_chatbot().
        self.llm = model

    def process(self, state: "State") -> dict:
        """Answer the most recent user message and append a simulated
        tool response alongside the raw LLM reply."""
        history = state["messages"]
        user_input = history[-1] if history else ""
        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])

        # Simulate tool-specific logic
        tools_response = f"Tool integration for: '{user_input}'"

        return {"messages": [llm_response, tools_response]}

    def create_chatbot(self, tools):
        """Return a graph-node callable whose LLM has *tools* bound."""
        llm_with_tools = self.llm.bind_tools(tools)

        def chatbot_node(state: "State"):
            # Hand the full message history to the tool-aware LLM.
            return {"messages": [llm_with_tools.invoke(state["messages"])]}

        return chatbot_node
|
| 38 |
+
|
src/graph/graph_builder.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langgraph.graph import StateGraph
|
| 2 |
+
from src.state.state import State
|
| 3 |
+
from src.basic_bot.chatbot_node import ChatbotNode
|
| 4 |
+
from src.basic_bot.chatbot_with_tool_node import ChatbotWithToolNode
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
from langgraph.graph import StateGraph
|
| 8 |
+
from langgraph.prebuilt import tools_condition
|
| 9 |
+
from src.tools.search_tool import get_tools, create_tool_node
|
| 10 |
+
from src.basic_bot.chatbot_with_tool_node import ChatbotWithToolNode
|
| 11 |
+
|
| 12 |
+
class GraphBuilder:
    """
    Manages the creation and setup of the StateGraph based on use cases.
    """

    def __init__(self, model):
        # LLM shared by every node the builder creates.
        self.llm = model
        self.graph_builder = StateGraph(State)
        self.chatbot_node = ChatbotNode(model)
        self.chatbot_with_tool_node = ChatbotWithToolNode(model)

    def build_graph(self):
        """
        Build and return the (uncompiled) tool-enabled chatbot graph.

        Wires a "chatbot" node to a "tools" node through langgraph's
        ``tools_condition`` so tool calls are executed and looped back
        into the chatbot.
        """
        # Fresh builder so repeated calls do not accumulate nodes.
        graph_builder = StateGraph(State)

        # Define tools and the node that executes them.
        tools = get_tools()
        tool_node = create_tool_node(tools)

        # Reuse the node created in __init__ instead of instantiating a
        # second ChatbotWithToolNode (the original rebuilt it redundantly).
        chatbot_node = self.chatbot_with_tool_node.create_chatbot(tools)

        # Add nodes
        graph_builder.add_node("chatbot", chatbot_node)
        graph_builder.add_node("tools", tool_node)

        # Route to "tools" when the LLM requested a tool call, then back.
        graph_builder.add_conditional_edges("chatbot", tools_condition)
        graph_builder.add_edge("tools", "chatbot")

        # Set entry point and return the builder for the caller to compile.
        graph_builder.set_entry_point("chatbot")
        return graph_builder

    def setup_graph(self, usecase: str):
        """
        Compile and return the graph for the selected *usecase*.

        Raises:
            ValueError: if *usecase* is not a supported option.
        """
        if usecase == "Basic Chatbot":
            self.graph_builder.add_node("chatbot", self.chatbot_node.process)
            self.graph_builder.set_entry_point("chatbot")
            self.graph_builder.set_finish_point("chatbot")
        elif usecase == "Chatbot with Tool":
            self.graph_builder = self.build_graph()
        else:
            raise ValueError("Invalid use case selected.")
        return self.graph_builder.compile()
|
src/state/state.py
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Annotated
|
| 2 |
+
from typing_extensions import TypedDict
|
| 3 |
+
from langgraph.graph.message import add_messages
|
| 4 |
+
|
| 5 |
+
class State(TypedDict):
    """
    Structure of the state carried through the graph.
    """
    # Conversation history; the add_messages reducer merges node output
    # into this list instead of overwriting it.
    messages: Annotated[list, add_messages]
|
src/streamlitui/loadui.py
CHANGED
|
@@ -27,8 +27,15 @@ class LoadStreamlitUI:
|
|
| 27 |
# API key input
|
| 28 |
self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
|
| 29 |
type="password")
|
|
|
|
|
|
|
| 30 |
# Use case selection
|
| 31 |
self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
|
| 33 |
|
| 34 |
|
|
|
|
| 27 |
# API key input
|
| 28 |
self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
|
| 29 |
type="password")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
# Use case selection
|
| 33 |
self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
|
| 34 |
+
|
| 35 |
+
if self.user_controls["selected_usecase"] =="Chatbot with Tool":
|
| 36 |
+
# API key input
|
| 37 |
+
os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
|
| 38 |
+
type="password")
|
| 39 |
|
| 40 |
|
| 41 |
|
src/tools/search_tool.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from langchain_community.tools.tavily_search import TavilySearchResults
|
| 2 |
+
from langgraph.prebuilt import ToolNode
|
| 3 |
+
|
| 4 |
+
def get_tools():
    """
    Returns a list of tools to be used in the chatbot.
    """
    # Tavily web search, limited to the two best hits.
    search = TavilySearchResults(max_results=2)
    return [search]
|
| 10 |
+
|
| 11 |
+
def create_tool_node(tools):
    """
    Creates and returns a tool node for the graph.
    """
    # ToolNode executes whichever tool the LLM's tool call names.
    return ToolNode(tools=tools)
|