# NOTE: lines below were pasted from a Hugging Face file-viewer page
# (commit 9f72bcf, "next commit", 746 bytes); kept as a comment so the
# module remains valid Python.
from langchain_core.messages import SystemMessage
from src.genai.utils.models_loader import llm_gpt
from .prompts import introduction_prompt
from .state import ConversationFormatter
class IntroductionNode:
    """Conversation-graph node that runs the introduction prompt.

    Sends the introduction system prompt plus the accumulated message
    history to an LLM with structured output, then routes based on the
    model's `complete` flag.
    """

    def __init__(self):
        # Default model; run() may receive a per-call override.
        self.llm = llm_gpt

    def run(self, state, llm=None):
        """Invoke the LLM and return the next state update.

        Args:
            state: Graph state mapping; must contain a "messages" list.
            llm: Optional model override. Falls back to ``self.llm``
                (previously this parameter was required and the stored
                model was never used).

        Returns:
            A dict with a "messages" list: ``['completed']`` once the
            structured response marks the conversation complete,
            otherwise the model's reply text.
        """
        model = llm if llm is not None else self.llm
        messages = [SystemMessage(content=introduction_prompt)] + state["messages"]
        response = model.with_structured_output(ConversationFormatter).invoke(messages)
        # NOTE(review): `complete` appears to be a string field — this
        # substring test mirrors the original behavior; confirm the
        # ConversationFormatter schema (a bool would be safer).
        if 'True' in response.complete:
            return {'messages': ['completed']}
        return {"messages": [response.response]}