"""Introduction chatbot: a minimal LangGraph workflow that interviews a user
for business details, records the exchange, and extracts structured details.

NOTE(review): imports mix relative (`.utils.state`) and absolute
(`utils.models_loader`) styles — confirm the intended package layout and
unify; left untouched here to avoid breaking the import path.
"""

from langchain_groq import ChatGroq  # NOTE(review): unused here — verify it is needed (re-export?)
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.checkpoint.memory import MemorySaver

from .utils.state import State
from .utils.nodes import introduction_node, extract_business_details
from utils.models_loader import llm

# Sentinel phrase the introduction node emits when the interview is finished.
# Must stay in sync with the prompt used by `introduction_node`.
COMPLETION_SENTINEL = "Thanks for providing all your required business details"

# Module-level conversation record shared by all chatbot instances.
# NOTE(review): shared mutable state — confirm a single global conversation
# log is intended rather than per-instance state.
business_state = State()


class IntroductionChatbot:
    """Wraps a single-node LangGraph agent that runs the introduction dialog.

    The graph is START -> "chatbot" -> END, where the "chatbot" node calls
    `introduction_node` with the shared `llm`. Conversation turns are kept
    both in `self.messages` (role/content dicts) and appended to the
    module-level `business_state.interactions`.
    """

    def __init__(self):
        # In-memory checkpointer so the compiled graph can resume a thread
        # across successive `chat` calls.
        self.memory = MemorySaver()
        self.llm = llm
        self.workflow = self._initialize_workflow()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)
        # Local transcript: list of {"role": ..., "content": ...} dicts.
        self.messages = []

    def _initialize_workflow(self):
        """Build the one-node StateGraph; compiled later in __init__.

        Returns:
            StateGraph: an uncompiled graph START -> chatbot -> END.
        """
        workflow = StateGraph(MessagesState)
        # Bind self.llm at call time so the node always uses this instance's model.
        workflow.add_node("chatbot", lambda state: introduction_node(state, self.llm))
        workflow.add_edge(START, "chatbot")
        workflow.add_edge("chatbot", END)
        return workflow

    def chat(self, user_input: str, thread_id: str = "1") -> str:
        """Send one user turn through the agent and return its reply.

        Args:
            user_input: the user's message for this turn.
            thread_id: checkpointer thread key; defaults to "1" so existing
                callers keep a single continuous conversation. Pass distinct
                ids to keep separate conversations.

        Returns:
            The agent's reply text. Also appends both turns to
            `self.messages` and to `business_state.interactions`.
        """
        self.messages.append({"role": "user", "content": user_input})
        config = {"configurable": {"thread_id": thread_id}}
        result = self.interact_agent.invoke({"messages": [user_input]}, config)
        response = result['messages'][-1].content
        self.messages.append({"role": "assistant", "content": response})
        business_state.interactions.append({'user': user_input, 'agent_response': response})
        return response

    def is_complete(self, latest_response: str) -> bool:
        """Return True when the agent signals the interview is finished.

        Detection is a substring match on the completion sentinel phrase,
        so it depends on the node's prompt emitting it verbatim.
        """
        return COMPLETION_SENTINEL in latest_response

    def extract_details(self):
        """Run structured extraction over the recorded interactions.

        Returns whatever `extract_business_details` produces for the shared
        `business_state.interactions` transcript; also prints it for
        debugging. NOTE(review): consider `logging` instead of `print`.
        """
        response = extract_business_details(business_state.interactions)
        print('Extracted details:', response)
        return response