# Commit 583f6dd — "Included CI CD" (provenance note; original file header was
# HuggingFace viewer page chrome and has been converted to a comment so the
# module parses).
from .prompts import tool_return_prompt , extract_user_reference_prompt
from langchain_core.messages import SystemMessage, HumanMessage
from src.genai.utils.models_loader import llm_gpt
from .state import ToolResponseFormatter, UserReferenceResponseFormatter
def tool_return_node(state):
    """Ask the LLM which tool to use, given the conversation history.

    Trims the in-state message history to the 18 most recent messages once
    it grows past 23 (a crude context-window bound), prepends the
    tool-selection system prompt, and requests a structured
    ``ToolResponseFormatter`` answer from the model.

    Args:
        state: Graph state mapping with a ``"messages"`` list of
            LangChain-style message objects.

    Returns:
        dict: Partial state update ``{"messages": [...]}`` containing one
        assistant message announcing the selected tool.
    """
    # Bound the prompt size: once history exceeds 23 messages, keep only
    # the 18 most recent. NOTE: this mutates state["messages"] in place.
    if len(state["messages"]) > 23:
        state["messages"] = state["messages"][-18:]
    history = state["messages"]
    template = [SystemMessage(content=tool_return_prompt)] + history
    response = llm_gpt.with_structured_output(ToolResponseFormatter).invoke(template)
    # NOTE(review): the entire structured object is interpolated below;
    # presumably a specific field (e.g. a tool-name attribute of
    # ToolResponseFormatter) was intended — confirm against its schema.
    return {"messages": [{'role':'assistant','content':f'''The exact name of the tool is: {response}'''}]}
def extract_user_reference_node(state):
    """Extract the user's video idea and story from their latest message.

    Finds the most recent ``HumanMessage`` in the history, sends its
    content to the model with the extraction system prompt, and returns
    the structured ``UserReferenceResponseFormatter`` fields as an
    assistant message.

    Args:
        state: Graph state mapping with a ``"messages"`` list of
            LangChain-style message objects.

    Returns:
        dict: Partial state update ``{"messages": [...]}`` containing one
        assistant message with the extracted video idea and story.

    Raises:
        ValueError: If the history contains no ``HumanMessage``.
    """
    history = state['messages']
    latest_human_message = next(
        (msg for msg in reversed(history) if isinstance(msg, HumanMessage)),
        None
    )
    # Guard: with no human message there is nothing to extract; fail with
    # a clear error instead of an AttributeError on ``None.content``.
    if latest_human_message is None:
        raise ValueError("No HumanMessage found in state['messages'].")
    template = [SystemMessage(content=extract_user_reference_prompt), HumanMessage(content=latest_human_message.content)]
    response = llm_gpt.with_structured_output(UserReferenceResponseFormatter).invoke(template)
    return {'messages': [{'role':'assistant','content':f'''The video idea is: {response.video_idea} and the video story is: {response.video_story}'''}]}