|
|
import logging
import os

from langchain_core.messages import SystemMessage
from langsmith import traceable

from src.genai.utils.models_loader import llm_gpt

from .prompts import introduction_prompt
from .state import ConversationFormatter, State
|
|
|
|
|
|
|
|
class IntroductionNode:
    """Graph node that handles the introduction / details-collection step.

    Sends the conversation history to the LLM with the introduction system
    prompt and asks for a ``ConversationFormatter``-shaped structured reply.
    """

    def __init__(self) -> None:
        # Shared model instance loaded once by the models_loader module.
        self.llm = llm_gpt

    @traceable(name="details collection")
    def run(self, state: State) -> dict:
        """Run one introduction turn against the LLM.

        Args:
            state: Conversation state; must contain a ``"messages"`` list of
                chat messages accumulated so far.

        Returns:
            A partial state update with:
            - ``"messages"``: the assistant reply appended as a role/content dict,
            - ``"response"``: the assistant's reply text,
            - ``"completion"``: the structured completion flag/value produced
              by the model.
        """
        # System prompt goes first so it frames the whole history.
        messages = [SystemMessage(content=introduction_prompt)] + state["messages"]

        # Force the model to answer in the ConversationFormatter schema.
        result = self.llm.with_structured_output(ConversationFormatter).invoke(messages)

        # Debug-level trace of the raw structured result (was a bare print).
        logging.getLogger(__name__).debug("Introduction node response: %s", result)

        return {
            "messages": [{"role": "assistant", "content": result.response}],
            "response": result.response,
            "completion": result.completion,
        }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|