import json
import os

from src.agents.chat_llm import chat_llm
from src.agents.context import Context
from src.utils.utils import load_config


class BaseChatAgent:
    """Chat agent that maintains a conversation context and answers via an LLM.

    Configuration (model path, sampling temperature, max tokens, timeout) is
    loaded from a config file at construction time.
    """

    def __init__(self, config_path) -> None:
        """Load agent configuration and initialize an empty conversation state.

        Args:
            config_path: Path to the agent configuration file understood by
                ``load_config``.
        """
        self.agent_config = load_config(config_path)
        # Running conversation context (system/user/assistant messages).
        self.context = Context()
        # Completed past conversations, kept separately from the live context.
        self.history_conversations = []
        # Summary of historical conversations; populated elsewhere.
        # NOTE(review): original comment mentioned an "interview protocol
        # index" — presumably set by a subclass; confirm against callers.
        self.hist_conv_summarization = None

    def talk_to_user(self, user_response=None):
        """Generate one assistant turn, optionally consuming a user reply.

        Args:
            user_response: Optional dict with key ``'patient_prompt'``
                containing the user's latest message. When ``None`` (e.g. the
                opening turn), no user message is appended before generating.
                Previously the default crashed with ``TypeError``; it is now
                handled gracefully.

        Returns:
            The generated assistant message (first generation), which is also
            appended to the conversation context.
        """
        if user_response is not None:
            self.context.add_user_prompt(user_response['patient_prompt'])
        response = chat_llm(
            messages=self.context.msg,
            model=self.agent_config.llm_model_path,
            temperature=self.agent_config.temperature,
            max_tokens=self.agent_config.max_tokens,
            n=1,
            timeout=self.agent_config.timeout,
            stop=None,
        )
        # Single lookup; the same text is recorded in context and returned.
        generation = response['generations'][0]
        self.context.add_assistant_prompt(generation)
        return generation