File size: 1,265 Bytes
b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 0cbebee b967fd3 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 |
from src.state.state import State
from langchain_core.messages import HumanMessage, AIMessage
class BasicChatbot:
    """
    Basic chatbot node with per-session conversation memory.

    Wraps an already memory-enabled LLM and exposes a ``process`` method
    that takes a graph state and returns the model's response keyed under
    ``'messages'``.
    """

    def __init__(self, model, session_id: str = "default"):
        """
        Initialize the BasicChatbot with the given model and memory.

        :param model: The LLM to be used for the chatbot (already
            memory-enabled); must expose ``invoke(messages, config=...)``.
        :param session_id: Session ID for conversation memory.
        """
        self.model = model
        self.session_id = session_id
        # Config passed on every invoke so the model's memory wrapper can
        # look up / store the history for this session.
        self.memory_config = {"configurable": {"session_id": session_id}}

    def process(self, state):
        """
        Process the state to generate a response from the model with memory.

        :param state: The current state of the chatbot; expected to carry
            a ``'messages'`` sequence (may be absent or empty).
        :return: The state unchanged when there are no messages, otherwise
            a dict with the model's response under ``'messages'``.
        """
        # .get() avoids a KeyError when the state has no 'messages' key yet;
        # the original indexed state['messages'] directly, so a missing key
        # crashed even though the falsy-guard below was meant to cover it.
        messages = state.get('messages')
        if not messages:
            # Nothing to respond to — pass the state through untouched.
            return state
        # Use the memory-enabled model with the session config so history
        # is read and written for this conversation.
        response = self.model.invoke(messages, config=self.memory_config)
        return {'messages': response}
|