import json
import os
from src.agents.context import Context
from src.utils.utils import load_config
from src.agents.chat_llm import chat_llm
class BaseChatAgent:
    """Base chat agent wrapping a single-turn LLM conversation loop.

    Loads agent configuration from a file and maintains the running
    conversation context plus bookkeeping for previous conversations.
    """

    def __init__(self, config_path) -> None:
        """Initialize the agent from a configuration file.

        Args:
            config_path: Path to a configuration file understood by
                ``load_config``; the result must expose ``llm_model_path``,
                ``temperature``, ``max_tokens`` and ``timeout`` attributes
                (read in ``talk_to_user``).
        """
        self.agent_config = load_config(config_path)
        # Running conversation context (user/assistant messages).
        self.context = Context()
        # Completed past conversations kept for later summarization.
        self.history_conversations = []
        # Summary of past conversations; None until one is produced.
        self.hist_conv_summarization = None

    def talk_to_user(
        self,
        user_response=None,
    ):
        """Exchange one turn with the user and return the agent's reply.

        Args:
            user_response: Optional dict carrying the user's latest message
                under the key ``'patient_prompt'``. When None (e.g. the
                agent opens the conversation), no user message is appended
                before querying the LLM.

        Returns:
            The first generated completion string from ``chat_llm``.
        """
        # Bug fix: the original unconditionally subscripted user_response,
        # so calling with the declared default (None) raised TypeError.
        if user_response is not None:
            self.context.add_user_prompt(user_response['patient_prompt'])
        response = chat_llm(
            messages=self.context.msg,
            model=self.agent_config.llm_model_path,
            temperature=self.agent_config.temperature,
            max_tokens=self.agent_config.max_tokens,
            n=1,
            timeout=self.agent_config.timeout,
            stop=None,
        )
        # Compute the reply once; record it in the context, then return it.
        reply = response['generations'][0]
        self.context.add_assistant_prompt(reply)
        return reply
|