from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.checkpoint.memory import MemorySaver
from .utils.state import State
from .utils.nodes import introduction_node, extract_business_details
from src.genai.utils.models_loader import llm

# Module-level state that accumulates the conversation's business interactions.
business_state = State()

class IntroductionChatbot:
    def __init__(self):
        self.memory = MemorySaver()
        self.llm = llm
        self.workflow = self._initialize_workflow()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        self.messages = []

    def _initialize_workflow(self):
        # Single-node graph: START -> chatbot -> END.
        workflow = StateGraph(MessagesState)
        workflow.add_node("chatbot", lambda state: introduction_node(state, self.llm))
        workflow.add_edge(START, "chatbot")
        workflow.add_edge("chatbot", END)
        return workflow

    def chat(self, user_input: str):
        """Stream the assistant's reply chunk by chunk for the given user input."""
        # Kept for reference; the checkpointer holds the canonical history per thread_id.
        self.messages.append({"role": "user", "content": user_input})
        config = {"configurable": {"thread_id": "1"}}
        # stream_mode="messages" yields (message_chunk, metadata) tuples.
        for message_chunk, _metadata in self.interact_agent.stream(
            {"messages": [user_input]},
            config=config,
            stream_mode="messages",
        ):
            yield message_chunk.content

    def is_complete(self, latest_response: str) -> bool:
        # Completion is signalled by this exact sentence in the model's reply.
        return "Thanks for providing all your required business details" in latest_response

    def extract_details(self):
        response = extract_business_details(business_state.interactions)
        print('Extracted details:', response)
        return response

    def reset(self):
        # Recompile with a fresh checkpointer to drop the conversation history.
        self.memory = MemorySaver()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        print('Memory cleared')
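

# Usage sketch, not part of the original module: a minimal REPL loop showing
# how the pieces fit together. Assumes the relative imports above resolve
# (i.e. this runs inside the package) and that introduction_node produces the
# completion sentence checked by is_complete().
if __name__ == "__main__":
    bot = IntroductionChatbot()
    while True:
        user_input = input("You: ")
        reply = ""
        # chat() is a generator, so tokens can be printed as they stream in.
        for chunk in bot.chat(user_input):
            print(chunk, end="", flush=True)
            reply += chunk
        print()
        if bot.is_complete(reply):
            bot.extract_details()
            bot.reset()
            break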