from langchain_groq import ChatGroq
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.checkpoint.memory import MemorySaver
from .utils.state import State
from .utils.nodes import introduction_node, extract_business_details
from src.genai.utils.models_loader import llm
business_state = State()
class IntroductionChatbot:
    """Streaming introduction chatbot built on a single-node langgraph workflow.

    The graph is START -> "chatbot" -> END, where the node delegates to
    ``introduction_node`` with the module-level ``llm``. Conversation memory is
    kept by a ``MemorySaver`` checkpointer compiled into the graph.
    """

    def __init__(self):
        self.memory = MemorySaver()
        self.llm = llm
        self.workflow = self._initialize_workflow()
        # Compiled graph with checkpointing; rebuilt by reset().
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        # Log of user turns only — chat() never records assistant replies here.
        self.messages = []

    def _initialize_workflow(self):
        """Build the one-node StateGraph (uncompiled)."""
        workflow = StateGraph(MessagesState)
        # Bind self.llm at call time so the node uses the instance's model.
        workflow.add_node("chatbot", lambda state: introduction_node(state, self.llm))
        workflow.add_edge(START, "chatbot")
        workflow.add_edge("chatbot", END)
        return workflow

    def chat(self, user_input: str):
        """Send one user turn and yield assistant response chunks as strings.

        Uses a fixed thread_id ("1"), so all turns share one checkpoint thread.
        """
        self.messages.append({"role": "user", "content": user_input})
        config = {"configurable": {"thread_id": "1"}}
        for message_chunk, metadata in self.interact_agent.stream(
            {"messages": [user_input]},
            config=config,
            stream_mode="messages"
        ):
            yield message_chunk.content

    def is_complete(self, latest_response: str) -> bool:
        """Return True once the model signals it has all business details."""
        return "Thanks for providing all your required business details" in latest_response

    def extract_details(self):
        """Extract structured business details from the recorded interactions."""
        response = extract_business_details(business_state.interactions)
        print('Extracted details:', response)
        return response

    def reset(self):
        """Discard conversation memory and start a fresh checkpointer.

        Also clears the local user-message log — previously it leaked turns
        from the prior conversation across resets.
        """
        self.memory = MemorySaver()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        self.messages = []  # bug fix: log was not cleared on reset
        print('Memory cleared')