ArseniyPerchik commited on
Commit
e758f09
·
1 Parent(s): 61c07ad
Files changed (7) hide show
  1. globals.py +30 -0
  2. part_1.py +73 -0
  3. part_2.py +88 -0
  4. part_3.py +74 -0
  5. requirements.txt +1 -1
  6. retriever.py +37 -0
  7. tools.py +45 -0
globals.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import TypedDict, Annotated
2
+ import os
3
+ import random
4
+
5
+ from lmnr import Laminar
6
+ from dotenv import load_dotenv
7
+ load_dotenv()
8
+
9
+ import datasets
10
+ from huggingface_hub import list_models
11
+ from langchain.docstore.document import Document
12
+ from langchain_community.retrievers import BM25Retriever
13
+ from langchain_community.tools import DuckDuckGoSearchRun
14
+ from langchain.tools import Tool
15
+ from langchain_ollama import ChatOllama
16
+ from langchain_huggingface import HuggingFaceEndpoint,ChatHuggingFace
17
+ from langchain_core.messages import AnyMessage, HumanMessage, AIMessage
18
+ from langgraph.graph import StateGraph, START, END
19
+ from langgraph.graph.message import add_messages
20
+ from langgraph.prebuilt import tools_condition, ToolNode
21
+
22
+
23
+
24
# GLOBALS — API keys/config read from the environment (.env loaded via
# load_dotenv() above, so these see values from the project's .env file).
# NOTE(review): this module is named `globals`; `import globals` shadows the
# builtin `globals()` in the importing module — consider renaming.
HF_TOKEN = os.getenv('HF_TOKEN')
PHOENIX_API_KEY = os.getenv('PHOENIX_API_KEY')
LANGFUSE_PUBLIC_KEY = os.getenv('LANGFUSE_PUBLIC_KEY')
LANGFUSE_SECRET_KEY = os.getenv('LANGFUSE_SECRET_KEY')
LANGFUSE_HOST = os.getenv('LANGFUSE_HOST')
LAMINAR_API_KEY = os.getenv('LAMINAR_API_KEY')
part_1.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from globals import *

# Ollama model to run locally (swap in 'qwen3:8b' to experiment).
model_name = 'llama3.2:latest'

# Initialize Laminar - this single step enables automatic tracing.
Laminar.initialize(project_api_key=LAMINAR_API_KEY)

# Chat model handle used by the agent below.
llm = ChatOllama(model=model_name)
11
+
12
+
13
# (Was meant to live in load_guest_dataset(); executed at import time instead.)
guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

# One Document per invitee; the guest's name is kept in metadata for lookup.
docs = []
for guest in guest_dataset:
    profile = '\n'.join([
        f"Name: {guest['name']}",
        f"Relation: {guest['relation']}",
        f"Description: {guest['description']}",
        f"Email: {guest['email']}",
    ])
    docs.append(Document(page_content=profile, metadata={'name': guest['name']}))

# Keyword-based (BM25) retriever over the guest documents.
bm25_retriever = BM25Retriever.from_documents(docs)
30
+
31
def extract_text(query: str) -> str:
    """Retrieves detailed information about gala guests based on their name or relation."""
    hits = bm25_retriever.invoke(query)
    if not hits:
        return 'NO match!'
    # Return at most the three best-matching guest profiles.
    return '\n\n'.join(doc.page_content for doc in hits[:3])
38
+
39
# Expose extract_text to the agent as a LangChain Tool.
guest_info_tool = Tool(
    func=extract_text,
    name='guest_info_retriever',
    description='Retrieves detailed information about gala guests based on their name or relation.',
)

# Single-tool toolbox for this part; bind it to the chat model.
tools = [guest_info_tool]
llm_with_tools = llm.bind_tools(tools)
47
+
48
+
49
class AgentState(TypedDict):
    # Conversation history; the add_messages reducer appends instead of replacing.
    messages: Annotated[list[AnyMessage], add_messages]


def assistant(state: AgentState):
    """One LLM step: run the tool-enabled model over the accumulated messages."""
    reply = llm_with_tools.invoke(state['messages'])
    return {'messages': [reply]}
56
+
57
# assistant ↔ tools loop: run the model, execute any requested tools, repeat
# until the model answers without tool calls.
builder = StateGraph(AgentState)

builder.add_node('assistant', assistant)
builder.add_node('tools', ToolNode(tools))

builder.add_edge(START, 'assistant')
# tools_condition routes to 'tools' when the last AI message has tool calls, else END.
builder.add_conditional_edges('assistant', tools_condition)
builder.add_edge('tools', 'assistant')
alfred = builder.compile()

# Rendering the graph typically needs an external mermaid renderer (often a
# network call) — a rendering failure should not abort the actual agent run.
try:
    with open("langgraph.png", "wb") as f:
        f.write(alfred.get_graph().draw_mermaid_png())
except Exception as e:
    print(f"Skipping graph render: {e}")

messages = [HumanMessage(content="Tell me about our guest named 'Lady Ada Lovelace'.")]
response = alfred.invoke({'messages': messages})

print("🎩 Alfred's Response:")
print(response['messages'][-1].content)
part_2.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from globals import *

# Initialize Laminar - this single step enables automatic tracing.
Laminar.initialize(project_api_key=LAMINAR_API_KEY)

# Ollama model to run locally (swap in 'qwen3:8b' to experiment).
model_name = 'llama3.2:latest'

llm = ChatOllama(model=model_name)

# Web search tool (DuckDuckGo).
search_tool = DuckDuckGoSearchRun()
16
+
17
+ def get_weather_info(location: str) -> str:
18
+ """Fetches weather info."""
19
+ weather_conditions = [
20
+ {"condition": "Rainy", "temp_c": 15},
21
+ {"condition": "Clear", "temp_c": 25},
22
+ {"condition": "Windy", "temp_c": 20}
23
+ ]
24
+ # Randomly select a weather condition
25
+ data = random.choice(weather_conditions)
26
+ return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"
27
+
28
# LangChain Tool wrapper so the agent can call get_weather_info.
weather_info_tool = Tool(
    func=get_weather_info,
    name='get_weather_info',
    description='Fetches weather info for a given location.',
)
33
+
34
+
35
def get_hub_stats(author: str) -> str:
    """Fetches the most downloaded model from the author."""
    try:
        # sort='downloads' with direction=-1 and limit=1 → single most-downloaded model.
        found = list(list_models(author=author, sort='downloads', direction=-1, limit=1))
        if not found:
            return f"No models found for author {author}."
        top = found[0]
        return f"The most downloaded model by {author} is {top.id} with {top.downloads:,} downloads."
    except Exception as e:
        # Tool boundary: report failures as text so the agent can react.
        return f"Error fetching models for {author}: {str(e)}"


# LangChain Tool wrapper so the agent can call get_hub_stats.
hub_stats_tool = Tool(
    func=get_hub_stats,
    name='get_hub_stats',
    description='Fetches the most downloaded model from the author.',
)
54
+
55
# Toolbox for the part-2 agent: web search, (dummy) weather, HF Hub stats.
tools = [search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = llm.bind_tools(tools)
59
+
60
+
61
class AgentState(TypedDict):
    # Conversation history; the add_messages reducer appends instead of replacing.
    messages: Annotated[list[AnyMessage], add_messages]


def assistant(state: AgentState):
    """One LLM step: run the tool-enabled model over the accumulated messages."""
    reply = chat_with_tools.invoke(state["messages"])
    return {'messages': [reply]}
69
+
70
+
71
# assistant ↔ tools loop: model runs, requested tools execute, control returns
# to the model until it answers without tool calls.
builder = StateGraph(AgentState)
builder.add_node('assistant', assistant)
builder.add_node('tools', ToolNode(tools))
builder.add_edge(START, 'assistant')
builder.add_conditional_edges('assistant', tools_condition)
builder.add_edge('tools', 'assistant')

alfred = builder.compile()

query = HumanMessage(content="Who is Facebook and what's their most downloaded model?")
response = alfred.invoke({'messages': [query]})

print("🎩 Alfred's response:")
print(response['messages'][-1].content)
87
+
88
+
part_3.py ADDED
@@ -0,0 +1,74 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from globals import *
from tools import search_tool, weather_info_tool, hub_stats_tool
from retriever import guest_info_tool

# Initialize Laminar - this single step enables automatic tracing.
Laminar.initialize(project_api_key=LAMINAR_API_KEY)

# Ollama model to run locally (swap in 'qwen3:8b' to experiment).
model_name = 'llama3.2:latest'

llm = ChatOllama(model=model_name)

# Full toolbox: guest lookup, web search, (dummy) weather, HF Hub stats.
tools = [guest_info_tool, search_tool, weather_info_tool, hub_stats_tool]
chat_with_tools = llm.bind_tools(tools)
15
+
16
+
17
class AgentState(TypedDict):
    # Conversation history; the add_messages reducer appends instead of replacing.
    messages: Annotated[list[AnyMessage], add_messages]


def assistant(state: AgentState):
    """One LLM step: run the tool-enabled model over the accumulated messages."""
    reply = chat_with_tools.invoke(state["messages"])
    return {'messages': [reply]}
25
+
26
+
27
# assistant ↔ tools loop shared by all the queries below.
builder = StateGraph(AgentState)

builder.add_node('assistant', assistant)
builder.add_node('tools', ToolNode(tools))

builder.add_edge(START, 'assistant')
# tools_condition routes to 'tools' when the last AI message has tool calls, else END.
builder.add_conditional_edges('assistant', tools_condition)
builder.add_edge('tools', 'assistant')

alfred = builder.compile()

# Rendering the graph typically needs an external mermaid renderer (often a
# network call) — a rendering failure should not abort the agent runs below.
try:
    with open("langgraph.png", "wb") as f:
        f.write(alfred.get_graph().draw_mermaid_png())
except Exception as e:
    print(f"Skipping graph render: {e}")
39
+
40
def _ask(messages):
    """Run one agent turn: invoke alfred, print the final answer, return the state.

    `messages` may be a bare string (coerced by add_messages) or a list of
    message objects; the returned state carries the full conversation so a
    follow-up call can thread it back in as memory.
    """
    result = alfred.invoke({'messages': messages})
    print("🎩 Alfred's Response:")
    print(result['messages'][-1].content)
    return result


response = _ask("Tell me more about 'Lady Ada Lovelace'")

response = _ask("What's the weather like in Paris tonight? Will it be suitable for our fireworks display?")

response = _ask("One of our guests is from Qwen. What can you tell me about their most popular model?")

response = _ask("I need to speak with 'Dr. Nikola Tesla' about recent advancements in wireless energy. Can you help me prepare for this conversation?")

# First interaction
response = _ask([HumanMessage(content="Tell me about 'Lady Ada Lovelace'. What's her background and how is she related to me?")])
print()

# Second interaction (referencing the first): prior messages act as memory.
response = _ask(response["messages"] + [HumanMessage(content="What projects is she currently working on?")])
74
+
requirements.txt CHANGED
@@ -1 +1 @@
1
- huggingface_hub==0.25.2
 
1
+ huggingface_hub
retriever.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from globals import *

# NOTE(review): model_name is never used in this module — looks copied from
# part_1; kept so any `from retriever import model_name` elsewhere still works.
# model_name = 'qwen3:8b'
model_name = 'llama3.2:latest'

# Guest list dataset, loaded at import time.
guest_dataset = datasets.load_dataset("agents-course/unit3-invitees", split="train")

# One Document per invitee; the guest's name is kept in metadata for lookup.
docs = []
for guest in guest_dataset:
    profile = '\n'.join([
        f"Name: {guest['name']}",
        f"Relation: {guest['relation']}",
        f"Description: {guest['description']}",
        f"Email: {guest['email']}",
    ])
    docs.append(Document(page_content=profile, metadata={'name': guest['name']}))

# Keyword-based (BM25) retriever over the guest documents.
bm25_retriever = BM25Retriever.from_documents(docs)
24
+
25
def extract_text(query: str) -> str:
    """Retrieves detailed information about gala guests based on their name or relation."""
    matched = bm25_retriever.invoke(query)
    if not matched:
        return 'NO match!'
    # At most the three best-matching guest profiles, blank-line separated.
    top = matched[:3]
    return '\n\n'.join(doc.page_content for doc in top)
32
+
33
# Expose extract_text to agents as a LangChain Tool.
guest_info_tool = Tool(
    func=extract_text,
    name='guest_info_retriever',
    description='Retrieves detailed information about gala guests based on their name or relation.',
)
tools.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from globals import *

# Web search tool (DuckDuckGo).
search_tool = DuckDuckGoSearchRun()
5
+
6
+
7
+ def get_weather_info(location: str) -> str:
8
+ """Fetches weather info."""
9
+ weather_conditions = [
10
+ {"condition": "Rainy", "temp_c": 15},
11
+ {"condition": "Clear", "temp_c": 25},
12
+ {"condition": "Windy", "temp_c": 20}
13
+ ]
14
+ # Randomly select a weather condition
15
+ data = random.choice(weather_conditions)
16
+ return f"Weather in {location}: {data['condition']}, {data['temp_c']}°C"
17
+
18
# LangChain Tool wrapper so an agent can call get_weather_info.
weather_info_tool = Tool(
    func=get_weather_info,
    name='get_weather_info',
    description='Fetches weather info for a given location.',
)
23
+
24
+
25
def get_hub_stats(author: str) -> str:
    """Fetches the most downloaded model from the author."""
    try:
        # Descending download sort with limit=1 → the single most-downloaded model.
        top_models = list(list_models(author=author, sort='downloads', direction=-1, limit=1))
        if not top_models:
            return f"No models found for author {author}."
        best = top_models[0]
        return f"The most downloaded model by {author} is {best.id} with {best.downloads:,} downloads."
    except Exception as e:
        # Tool boundary: report failures as text so the agent can react.
        return f"Error fetching models for {author}: {str(e)}"
38
+
39
# LangChain Tool wrapper so an agent can call get_hub_stats.
hub_stats_tool = Tool(
    func=get_hub_stats,
    name='get_hub_stats',
    description='Fetches the most downloaded model from the author.',
)
44
+
45
+