# RAG_Agent / part_2.py
# (Hugging Face Space page header: ArseniyPerchik — commit "more", e758f09)
from globals import *

# Initialize Laminar - this single step enables automatic tracing
Laminar.initialize(project_api_key=LAMINAR_API_KEY)

# Local Ollama chat model used as the agent's LLM.
# model_name = 'qwen3:8b'  # alternative local model
model_name = 'llama3.2:latest'
llm = ChatOllama(model=model_name)

# Web-search tool exposed to the agent.
search_tool = DuckDuckGoSearchRun()
def get_weather_info(location: str) -> str:
    """Return a simulated weather report for *location*.

    Dummy tool: picks one of a few canned conditions at random and
    formats it as a single human-readable line.
    """
    options = (
        {"condition": "Rainy", "temp_c": 15},
        {"condition": "Clear", "temp_c": 25},
        {"condition": "Windy", "temp_c": 20},
    )
    picked = random.choice(options)
    return f"Weather in {location}: {picked['condition']}, {picked['temp_c']}°C"
# Wrap the weather helper as a LangChain Tool so the agent can call it.
weather_info_tool = Tool(
    func=get_weather_info,
    name='get_weather_info',
    description='Fetches weather info for a given location.',
)
def get_hub_stats(author: str) -> str:
    """Return a one-line summary of *author*'s most-downloaded Hub model.

    On any failure (network, bad author, ...) a descriptive error string
    is returned instead of raising, so the agent can relay it to the user.
    """
    try:
        # Ask the Hub for only the single top model, sorted by downloads.
        top = next(
            iter(list_models(author=author, sort='downloads', direction=-1, limit=1)),
            None,
        )
        if top is None:
            return f"No models found for author {author}."
        return f"The most downloaded model by {author} is {top.id} with {top.downloads:,} downloads."
    except Exception as e:
        return f"Error fetching models for {author}: {str(e)}"
# Expose the Hub-stats helper as a LangChain Tool.
hub_stats_tool = Tool(
    func=get_hub_stats,
    name='get_hub_stats',
    description='Fetches the most downloaded model from the author.',
)
# print(hub_stats_tool.invoke('facebook'))
# All tools the agent may call; bind them to the LLM so it can emit tool calls.
tools = [search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = llm.bind_tools(tools)
class AgentState(TypedDict):
    """Graph state: the conversation messages, merged with ``add_messages``."""
    messages: Annotated[list[AnyMessage], add_messages]
def assistant(state: AgentState):
    """LLM node: run the tool-enabled chat model over the messages so far."""
    reply = chat_with_tools.invoke(state["messages"])
    return {'messages': [reply]}
# Wire up the agent graph: assistant node <-> tool node loop.
builder = StateGraph(AgentState)
builder.add_node('assistant', assistant)
builder.add_node('tools', ToolNode(tools))
builder.add_edge(START, 'assistant')
# Prebuilt tools_condition: route to 'tools' when the last LLM message
# contains tool calls, otherwise end the run.
builder.add_conditional_edges('assistant', tools_condition)
builder.add_edge('tools', 'assistant')
alfred = builder.compile()
# Demo run: push one user question through the compiled graph and print
# the final assistant message.
messages = [HumanMessage(content="Who is Facebook and what's their most downloaded model?")]
response = alfred.invoke({'messages': messages})
print("🎩 Alfred's response:")
print(response['messages'][-1].content)