subashpoudel's picture
Converted code to OOP
ef9fa4b
raw
history blame
489 Bytes
from .state import DetailsFormatter
from langchain_core.messages import SystemMessage
from src.genai.utils.models_loader import llm_gpt
from .prompts import introduction_prompt
class IntroductionNode:
    """Graph node that produces the introduction message for a conversation.

    Prepends the introduction system prompt to the accumulated message
    history in the graph state and invokes the LLM to generate the reply.
    """

    def __init__(self):
        # Default model used when run() is not given an explicit override.
        # (Previously this attribute was assigned but never read — run()
        # required the caller to pass an llm every time.)
        self.llm = llm_gpt

    def run(self, state, llm=None):
        """Invoke the LLM on the introduction prompt plus prior messages.

        Args:
            state: Graph state mapping; must contain a "messages" list of
                chat messages (KeyError if absent, matching prior behavior).
            llm: Optional LLM override. Falls back to the instance's
                configured model (self.llm) when omitted. Kept as the
                second positional parameter for backward compatibility
                with callers that passed it explicitly.

        Returns:
            A dict with a single "messages" key holding the LLM response
            in a list, suitable for merging back into the graph state.
        """
        template = introduction_prompt
        messages = [SystemMessage(content=template)] + state["messages"]
        # Use the explicit override when provided; otherwise the model
        # configured in __init__ — this makes self.llm actually effective.
        model = llm if llm is not None else self.llm
        response = model.invoke(messages)
        return {"messages": [response]}