# RAG_Agent / part_3.py
# (Hugging Face Space page residue removed: uploader "ArseniyPerchik",
#  commit "more", hash e758f09 — kept here as provenance, not code.)
from globals import *
from tools import search_tool, weather_info_tool, hub_stats_tool
from retriever import guest_info_tool
# Initialize Laminar - this single step enables automatic tracing
# (LAMINAR_API_KEY is expected to come from `globals` — TODO confirm).
Laminar.initialize(project_api_key=LAMINAR_API_KEY)

# Local Ollama chat model that backs the agent.
# model_name = 'qwen3:8b'  # alternative model, kept for easy switching
model_name = 'llama3.2:latest'
llm = ChatOllama(model=model_name)

# Tools the model may call; bind_tools lets the LLM emit tool-call messages.
tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = llm.bind_tools(tools)
class AgentState(TypedDict):
    """Graph state for the agent.

    `messages` is the running conversation; the `add_messages` reducer
    appends each node's returned messages instead of overwriting the list.
    """
    messages: Annotated[list[AnyMessage], add_messages]
def assistant(state: AgentState):
    """LLM node: run the tool-bound chat model over the conversation so far.

    Returns a state delta with one new message — the model's reply, which
    may carry tool calls for the conditional edge to route on.
    """
    reply = chat_with_tools.invoke(state["messages"])
    return {"messages": [reply]}
# Assemble the agent graph: assistant -> (tools -> assistant)* -> END.
builder = StateGraph(AgentState)
builder.add_node('assistant', assistant)
builder.add_node('tools', ToolNode(tools))
builder.add_edge(START, 'assistant')
# tools_condition routes to 'tools' when the last message has tool calls,
# otherwise to END.
builder.add_conditional_edges('assistant', tools_condition)
builder.add_edge('tools', 'assistant')
alfred = builder.compile()

# Best-effort diagram export: draw_mermaid_png() normally calls out to a
# remote mermaid renderer, so a network/renderer failure must not abort
# the agent script — log and continue instead.
try:
    with open("langgraph.png", "wb") as f:
        f.write(alfred.get_graph().draw_mermaid_png())
except Exception as exc:  # non-essential artifact
    print(f"Skipping graph diagram (render failed: {exc})")
def _ask(question: str) -> None:
    """Run one stateless query through the agent and print the final answer.

    Each call starts a fresh conversation (no history is carried over).
    """
    result = alfred.invoke({"messages": question})
    # Single normalized label — the original copies disagreed on
    # "response:" vs "Response:".
    print("🎩 Alfred's Response:")
    print(result['messages'][-1].content)

# Demo queries exercising each tool: retriever, weather, hub stats, search.
_ask("Tell me more about 'Lady Ada Lovelace'")
_ask("What's the weather like in Paris tonight? Will it be suitable for our fireworks display?")
_ask("One of our guests is from Qwen. What can you tell me about their most popular model?")
_ask("I need to speak with 'Dr. Nikola Tesla' about recent advancements in wireless energy. Can you help me prepare for this conversation?")
# Two-turn demo: conversation memory is kept by replaying the prior
# messages into the next invoke (alfred itself is stateless per call).

# First interaction
opening = [HumanMessage(content="Tell me about 'Lady Ada Lovelace'. What's her background and how is she related to me?")]
response = alfred.invoke({"messages": opening})
print("🎩 Alfred's Response:")
print(response['messages'][-1].content)
print()

# Second interaction (referencing the first)
follow_up = HumanMessage(content="What projects is she currently working on?")
response = alfred.invoke({"messages": response["messages"] + [follow_up]})
print("🎩 Alfred's Response:")
print(response['messages'][-1].content)