# NOTE: The three lines above in the original were Hugging Face Spaces page
# residue ("Spaces: Sleeping") left over from scraping — not part of the code.
| from langchain_groq import ChatGroq | |
| from langgraph.graph import StateGraph, MessagesState, START, END | |
| from langgraph.checkpoint.memory import MemorySaver | |
| from langgraph.prebuilt import create_react_agent | |
| from .utils.state import State,StateUpdateFormatter | |
| # from .utils.nodes import business_interaction_node, cleanup_messages | |
| from utils.models_loader import llm | |
| from langchain_core.messages import SystemMessage, ToolMessage | |
| from .utils.prompts import business_retrieval_prompt, check_state_update_prompt | |
| from .utils.utils import manual_retrieval | |
| from context_analysis_agent.utils.utils import save_to_db | |
# Module-level shared state: holds the current business_details dict and the
# running interaction log, shared by every BusinessInteractionChatbot method.
business_state = State()
class BusinessInteractionChatbot:
    """Conversational agent that answers business questions.

    Runs a small LangGraph workflow (chatbot -> remove_message -> END) on top
    of a tool-less ReAct agent, keeps a rolling local chat history, and syncs
    the module-level ``business_state`` with the database when the LLM detects
    that the business details changed.
    """

    def __init__(self):
        # Rolling chat history as {"role": ..., "content": ...} dicts
        # (user and assistant turns), trimmed by delete_messages.
        self.messages = []
        self.business_details = None
        # ReAct agent with no tools: retrieval output is injected manually as
        # a ToolMessage inside _call_model instead of via tool-calling.
        self.react_agent = create_react_agent(model=llm, tools=[])
        self.memory = MemorySaver()
        self.workflow = self._initialize_workflow()
        self.interact_agent = self.workflow.compile(checkpointer=self.memory)

    def _initialize_workflow(self):
        """Build the graph: START -> chatbot -> remove_message -> END."""
        workflow = StateGraph(MessagesState)
        workflow.add_node("chatbot", self._call_model)
        workflow.add_node("remove_message", self.delete_messages)
        workflow.add_edge(START, "chatbot")
        workflow.add_edge("chatbot", "remove_message")
        # BUG FIX: the terminal edge must leave "remove_message". The original
        # wired "chatbot" to both "remove_message" and END, giving "chatbot"
        # two unconditional outgoing edges and leaving "remove_message" with
        # no path to END.
        workflow.add_edge("remove_message", END)
        return workflow

    def delete_messages(self, state):
        """Graph node: trim the local history to at most 4 messages by
        dropping the oldest user/assistant pair. Mutates self.messages only;
        returns no state update."""
        print('Entered message deletion....')
        if len(self.messages) > 4:
            print('satisfied...')
            self.messages = self.messages[2:]

    def _call_model(self, state):
        """Graph node: retrieve business context, build the system prompt,
        and invoke the ReAct agent; returns the reply as a messages update."""
        print('Entered into callmodel')
        # Hoisted: the original built this user-message string twice.
        user_text = str([msg['content'] for msg in self.messages
                         if msg['role'] == 'user'])
        retrievals = manual_retrieval(user_text, business_state.business_details)
        template = business_retrieval_prompt(user_text,
                                             str(business_state.business_details))
        messages = [
            SystemMessage(content=template),
            ToolMessage(content="Tool's response:\n" + retrievals,
                        tool_call_id='call_business_interaction'),
        ] + state["messages"]
        print('The message is:', messages)
        backup_response = self.react_agent.invoke({'messages': messages})['messages'][-1]
        print('Backup response:', backup_response.content)
        return {"messages": [backup_response.content]}

    def check_state_update(self):
        """Ask the LLM whether the conversation implies changes to the stored
        business details; returns the (possibly updated) details as a dict
        via the StateUpdateFormatter structured output."""
        # Removed: a dead no-op statement (`business_state.business_details`)
        # stood here in the original.
        user_text = str([msg['content'] for msg in self.messages
                         if msg['role'] == 'user'])
        template = check_state_update_prompt(business_state.business_details,
                                             user_text)
        # Renamed from `messages` to avoid shadowing the string built above.
        prompt_messages = [SystemMessage(content=template)]
        response = llm.with_structured_output(StateUpdateFormatter).invoke(prompt_messages)
        return response.model_dump()

    def chat(self, user_input: str, business_details: dict):
        """Handle one user turn.

        Syncs ``business_state`` with the caller's ``business_details``,
        persists detected state changes via save_to_db, runs the compiled
        graph, records the exchange, and returns
        ``(assistant_response, current_business_details)``.
        """
        print('Entered into chat')
        business_state.business_details = business_details
        self.messages.append({"role": "user", "content": f'{user_input}'})
        checked_details = self.check_state_update()
        print('Checked details:', checked_details)
        print('Business details:', business_state.business_details)
        if checked_details != business_state.business_details:
            save_to_db(checked_details)
            print('Database Updated as the state changed....')
            business_state.business_details = checked_details
        config = {"configurable": {"thread_id": "2"}}
        response = self.interact_agent.invoke({"messages": self.messages},
                                              config)['messages'][-1].content
        print('The response:', response)
        self.messages.append({"role": "assistant", "content": response})
        print('The message_history:', self.messages)
        business_state.interactions.append({'user': user_input,
                                            'agent_response': response})
        return response, business_state.business_details