neznib commited on
Commit
ac0db5b
·
1 Parent(s): 8dffb1d
Files changed (6) hide show
  1. agent.py +53 -0
  2. app_playground.ipynb +325 -0
  3. playground.ipynb +130 -26
  4. prompts/system_prompt.md +13 -0
  5. requirements.txt +2 -1
  6. test.ipynb +123 -0
agent.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from langchain.chat_models import init_chat_model
3
+ from langchain_core.messages import HumanMessage, SystemMessage, AIMessage, AnyMessage
4
+ from langgraph.graph import add_messages
5
+
6
+ from typing_extensions import TypedDict, Annotated
7
+
8
+
9
class State(TypedDict):
    """Shared LangGraph state.

    Attributes:
        messages: conversation history; the ``add_messages`` reducer appends
            new messages to the list rather than replacing it.
        graph_state: free-form string channel carried through the graph.
    """

    # Annotated with add_messages so node returns are merged, not overwritten.
    messages: Annotated[list, add_messages]
    graph_state: str
12
+
13
+
14
def get_llm():
    """Initialize and return the Groq-hosted chat model.

    Returns:
        A LangChain chat model bound to ``llama-3.3-70b-versatile`` on Groq.

    Raises:
        EnvironmentError: if ``GROQ_API_KEY`` is not set, so the failure is
            immediate and descriptive instead of an opaque auth error at the
            first ``invoke`` call. (The original called ``os.getenv`` and
            discarded the result, which validated nothing.)
    """
    if not os.getenv("GROQ_API_KEY"):
        raise EnvironmentError("GROQ_API_KEY environment variable is not set")
    return init_chat_model("llama-3.3-70b-versatile", model_provider="groq")
17
+
18
+
19
def get_graph(llm):
    """Build and compile a single-node LangGraph that answers GAIA questions.

    Args:
        llm: a LangChain chat model (e.g. the one returned by ``get_llm``).

    Returns:
        A compiled graph whose single node prepends the system prompt from
        ``prompts/system_prompt.md`` to the incoming messages and invokes
        ``llm``.
    """
    with open('prompts/system_prompt.md', 'r', encoding='utf-8') as markdown_file:
        system_prompt = markdown_file.read()

    def node_1(state):
        """Prepend the system prompt to the conversation and call the LLM."""
        print("---Node 1---")

        messages = state['messages']

        # Build ONE flat list of messages. The original nested the history
        # list inside another list and wrapped it again as ("system", prompt),
        # which made langchain try to coerce a list of messages into a
        # SystemMessage's content and raise a pydantic ValidationError.
        prompt = [SystemMessage(content=system_prompt)] + list(messages)

        response = llm.invoke(prompt)

        # The state channel is "messages" (see State). Returning the key
        # "message" meant the add_messages reducer never saw the reply.
        return {"messages": response}

    from langgraph.graph import START, END, StateGraph

    # Build graph: START -> node_1 -> END
    builder = StateGraph(State)
    builder.add_node("node_1", node_1)
    builder.add_edge(START, "node_1")
    builder.add_edge("node_1", END)

    return builder.compile()
52
+
53
+
app_playground.ipynb ADDED
@@ -0,0 +1,325 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "id": "initial_id",
6
+ "metadata": {
7
+ "collapsed": true,
8
+ "ExecuteTime": {
9
+ "end_time": "2025-04-26T20:02:07.770606Z",
10
+ "start_time": "2025-04-26T20:02:07.726139Z"
11
+ }
12
+ },
13
+ "source": [
14
+ "from dotenv import load_dotenv\n",
15
+ "from gradio.server_messages import BaseMessage\n",
16
+ "\n",
17
+ "from agent import *\n",
18
+ "\n",
19
+ "load_dotenv()\n",
20
+ "\n",
21
+ "llm = get_llm()\n",
22
+ "\n",
23
+ "graph = get_graph(llm)\n"
24
+ ],
25
+ "outputs": [],
26
+ "execution_count": 3
27
+ },
28
+ {
29
+ "metadata": {
30
+ "ExecuteTime": {
31
+ "end_time": "2025-04-26T19:33:33.529627Z",
32
+ "start_time": "2025-04-26T19:33:33.064005Z"
33
+ }
34
+ },
35
+ "cell_type": "code",
36
+ "source": [
37
+ "res = llm.invoke(\"Hello, how are you?\")\n",
38
+ "\n",
39
+ "res.content"
40
+ ],
41
+ "id": "df3ed82e9ec7006c",
42
+ "outputs": [
43
+ {
44
+ "data": {
45
+ "text/plain": [
46
+ "\"Hello! I'm just a language model, so I don't have feelings or emotions like humans do, but I'm functioning properly and ready to help with any questions or tasks you might have. How can I assist you today?\""
47
+ ]
48
+ },
49
+ "execution_count": 3,
50
+ "metadata": {},
51
+ "output_type": "execute_result"
52
+ }
53
+ ],
54
+ "execution_count": 3
55
+ },
56
+ {
57
+ "metadata": {},
58
+ "cell_type": "code",
59
+ "outputs": [],
60
+ "execution_count": null,
61
+ "source": [
62
+ "from langchain.chat_models import init_chat_model\n",
63
+ "\n",
64
+ "\n",
65
+ "model = init_chat_model(\"llama-3.3-70b-versatile\", model_provider=\"groq\")\n"
66
+ ],
67
+ "id": "ca6d91a7416ee6c"
68
+ },
69
+ {
70
+ "metadata": {
71
+ "ExecuteTime": {
72
+ "end_time": "2025-04-26T19:41:23.111594Z",
73
+ "start_time": "2025-04-26T19:41:23.095834Z"
74
+ }
75
+ },
76
+ "cell_type": "code",
77
+ "source": [
78
+ "with open('prompts/system_prompt.md', 'r', encoding='utf-8') as markdown_file:\n",
79
+ " system_prompt = markdown_file.read()\n",
80
+ "\n",
81
+ "print(system_prompt)"
82
+ ],
83
+ "id": "fad84707949cb20e",
84
+ "outputs": [
85
+ {
86
+ "name": "stdout",
87
+ "output_type": "stream",
88
+ "text": [
89
+ "# Task: Answering GAIA Benchmark Questions\n",
90
+ "You are tasked with answering questions from the GAIA benchmark for AI agents.\n",
91
+ "\n",
92
+ "Provide ONLY the precise answer to the question. Do not include explanations, reasoning, or any additional text. Be direct, specific, and concise to meet the strict exact-matching requirements of the GAIA benchmark.\n",
93
+ "\n",
94
+ "# Output Format\n",
95
+ "- **Single-word or short-phrase answers:** If the question necessitates a brief answer, provide just that word or phrase.\n",
96
+ "- **Numerical values:** Provide only the number when applicable, with no additional formatting or units unless specifically requested.\n",
97
+ "- **Full sentences:** If the question expects a sentence, provide the exact sentence required with no extra characters, punctuation, or formatting.\n",
98
+ "\n",
99
+ "# Notes\n",
100
+ "- Be aware of strict exact-matching requirements; even minor deviations can result in an incorrect response.\n",
101
+ "- If any ambiguity exists in the phrasing of the input, respond with an answer that aligns with the GAIA benchmark's intended interpretation.\"\"\"), (\"user\", question)])\n",
102
+ "\n"
103
+ ]
104
+ }
105
+ ],
106
+ "execution_count": 5
107
+ },
108
+ {
109
+ "metadata": {
110
+ "ExecuteTime": {
111
+ "end_time": "2025-04-26T19:58:38.799277Z",
112
+ "start_time": "2025-04-26T19:58:38.529893Z"
113
+ }
114
+ },
115
+ "cell_type": "code",
116
+ "source": [
117
+ "\n",
118
+ "from langchain_core.messages import HumanMessage, SystemMessage, AIMessage, AnyMessage\n",
119
+ "\n",
120
+ "messages: AnyMessage = [AIMessage(content=\"Hello, how are you?\"),\n",
121
+ " SystemMessage(content=system_prompt),\n",
122
+ " HumanMessage(content=\"How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia.\"),\n",
123
+ " AIMessage(content=\"I am fine, thank you!\"),\n",
124
+ " SystemMessage(content=system_prompt),\n",
125
+ " HumanMessage(content=\"How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia.\")\n",
126
+ "]\n",
127
+ "\n",
128
+ "prompt: AnyMessage = [SystemMessage(content=system_prompt), messages]\n",
129
+ "\n",
130
+ "\n",
131
+ "response = llm.invoke(messages)"
132
+ ],
133
+ "id": "e2cbaf6bbb86c838",
134
+ "outputs": [],
135
+ "execution_count": 23
136
+ },
137
+ {
138
+ "metadata": {
139
+ "ExecuteTime": {
140
+ "end_time": "2025-04-26T19:58:42.084030Z",
141
+ "start_time": "2025-04-26T19:58:42.074169Z"
142
+ }
143
+ },
144
+ "cell_type": "code",
145
+ "source": "print(response)",
146
+ "id": "527e07754203caff",
147
+ "outputs": [
148
+ {
149
+ "name": "stdout",
150
+ "output_type": "stream",
151
+ "text": [
152
+ "content='3' additional_kwargs={} response_metadata={'token_usage': {'completion_tokens': 2, 'prompt_tokens': 541, 'total_tokens': 543, 'completion_time': 0.008988252, 'prompt_time': 0.034576024, 'queue_time': 0.09541405100000001, 'total_time': 0.043564276}, 'model_name': 'llama-3.3-70b-versatile', 'system_fingerprint': 'fp_9a8b91ba77', 'finish_reason': 'stop', 'logprobs': None} id='run-e4b157ef-483f-4252-ac10-6ce0461f3992-0' usage_metadata={'input_tokens': 541, 'output_tokens': 2, 'total_tokens': 543}\n"
153
+ ]
154
+ }
155
+ ],
156
+ "execution_count": 24
157
+ },
158
+ {
159
+ "metadata": {
160
+ "ExecuteTime": {
161
+ "end_time": "2025-04-26T19:49:26.983215Z",
162
+ "start_time": "2025-04-26T19:49:26.974629Z"
163
+ }
164
+ },
165
+ "cell_type": "code",
166
+ "source": "test = [SystemMessage(content=system_prompt), messages]",
167
+ "id": "d3ed27edf95d2361",
168
+ "outputs": [],
169
+ "execution_count": 11
170
+ },
171
+ {
172
+ "metadata": {
173
+ "ExecuteTime": {
174
+ "end_time": "2025-04-26T19:49:29.366542Z",
175
+ "start_time": "2025-04-26T19:49:29.353873Z"
176
+ }
177
+ },
178
+ "cell_type": "code",
179
+ "source": "test",
180
+ "id": "d3600eb4308a35b0",
181
+ "outputs": [
182
+ {
183
+ "data": {
184
+ "text/plain": [
185
+ "[SystemMessage(content='# Task: Answering GAIA Benchmark Questions\\nYou are tasked with answering questions from the GAIA benchmark for AI agents.\\n\\nProvide ONLY the precise answer to the question. Do not include explanations, reasoning, or any additional text. Be direct, specific, and concise to meet the strict exact-matching requirements of the GAIA benchmark.\\n\\n# Output Format\\n- **Single-word or short-phrase answers:** If the question necessitates a brief answer, provide just that word or phrase.\\n- **Numerical values:** Provide only the number when applicable, with no additional formatting or units unless specifically requested.\\n- **Full sentences:** If the question expects a sentence, provide the exact sentence required with no extra characters, punctuation, or formatting.\\n\\n# Notes\\n- Be aware of strict exact-matching requirements; even minor deviations can result in an incorrect response.\\n- If any ambiguity exists in the phrasing of the input, respond with an answer that aligns with the GAIA benchmark\\'s intended interpretation.\"\"\"), (\"user\", question)])\\n', additional_kwargs={}, response_metadata={}),\n",
186
+ " [HumanMessage(content='How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia.', additional_kwargs={}, response_metadata={})]]"
187
+ ]
188
+ },
189
+ "execution_count": 12,
190
+ "metadata": {},
191
+ "output_type": "execute_result"
192
+ }
193
+ ],
194
+ "execution_count": 12
195
+ },
196
+ {
197
+ "metadata": {
198
+ "ExecuteTime": {
199
+ "end_time": "2025-04-26T19:51:05.456437Z",
200
+ "start_time": "2025-04-26T19:51:04.974887Z"
201
+ }
202
+ },
203
+ "cell_type": "code",
204
+ "source": "llm.invoke(BaseMessage(content=\"Hallo\")).content",
205
+ "id": "1c014b109e72d895",
206
+ "outputs": [
207
+ {
208
+ "ename": "ValidationError",
209
+ "evalue": "1 validation error for BaseMessage\ntype\n Field required [type=missing, input_value={'content': 'Hallo'}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing",
210
+ "output_type": "error",
211
+ "traceback": [
212
+ "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
213
+ "\u001B[31mValidationError\u001B[39m Traceback (most recent call last)",
214
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[15]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m llm.invoke(\u001B[43mBaseMessage\u001B[49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mHallo\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m)\u001B[49m).content\n",
215
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/base.py:78\u001B[39m, in \u001B[36mBaseMessage.__init__\u001B[39m\u001B[34m(self, content, **kwargs)\u001B[39m\n\u001B[32m 70\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__init__\u001B[39m(\n\u001B[32m 71\u001B[39m \u001B[38;5;28mself\u001B[39m, content: Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mlist\u001B[39m[Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mdict\u001B[39m]]], **kwargs: Any\n\u001B[32m 72\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 73\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Pass in content as positional arg.\u001B[39;00m\n\u001B[32m 74\u001B[39m \n\u001B[32m 75\u001B[39m \u001B[33;03m Args:\u001B[39;00m\n\u001B[32m 76\u001B[39m \u001B[33;03m content: The string contents of the message.\u001B[39;00m\n\u001B[32m 77\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m78\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[34;43m__init__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
216
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/load/serializable.py:130\u001B[39m, in \u001B[36mSerializable.__init__\u001B[39m\u001B[34m(self, *args, **kwargs)\u001B[39m\n\u001B[32m 128\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__init__\u001B[39m(\u001B[38;5;28mself\u001B[39m, *args: Any, **kwargs: Any) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 129\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"\"\"\"\u001B[39;00m \u001B[38;5;66;03m# noqa: D419\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m130\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[34;43m__init__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m*\u001B[49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
217
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/pydantic/main.py:253\u001B[39m, in \u001B[36mBaseModel.__init__\u001B[39m\u001B[34m(self, **data)\u001B[39m\n\u001B[32m 251\u001B[39m \u001B[38;5;66;03m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001B[39;00m\n\u001B[32m 252\u001B[39m __tracebackhide__ = \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m253\u001B[39m validated_self = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m__pydantic_validator__\u001B[49m\u001B[43m.\u001B[49m\u001B[43mvalidate_python\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdata\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mself_instance\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[32m 254\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m validated_self:\n\u001B[32m 255\u001B[39m warnings.warn(\n\u001B[32m 256\u001B[39m \u001B[33m'\u001B[39m\u001B[33mA custom validator is returning a value other than `self`.\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[33m'\u001B[39m\n\u001B[32m 257\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mReturning anything other than `self` from a top level model validator isn\u001B[39m\u001B[33m'\u001B[39m\u001B[33mt supported when validating via `__init__`.\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[33m\"\u001B[39m\n\u001B[32m 258\u001B[39m \u001B[33m'\u001B[39m\u001B[33mSee the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.\u001B[39m\u001B[33m'\u001B[39m,\n\u001B[32m 259\u001B[39m stacklevel=\u001B[32m2\u001B[39m,\n\u001B[32m 260\u001B[39m )\n",
218
+ "\u001B[31mValidationError\u001B[39m: 1 validation error for BaseMessage\ntype\n Field required [type=missing, input_value={'content': 'Hallo'}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing"
219
+ ]
220
+ }
221
+ ],
222
+ "execution_count": 15
223
+ },
224
+ {
225
+ "metadata": {
226
+ "ExecuteTime": {
227
+ "end_time": "2025-04-26T20:00:48.379767Z",
228
+ "start_time": "2025-04-26T20:00:48.266692Z"
229
+ }
230
+ },
231
+ "cell_type": "code",
232
+ "source": "graph = get_graph(llm)",
233
+ "id": "bc4f9a98277ddc85",
234
+ "outputs": [
235
+ {
236
+ "ename": "TypeError",
237
+ "evalue": "get_graph() takes 0 positional arguments but 1 was given",
238
+ "output_type": "error",
239
+ "traceback": [
240
+ "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
241
+ "\u001B[31mTypeError\u001B[39m Traceback (most recent call last)",
242
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[25]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m graph = \u001B[43mget_graph\u001B[49m\u001B[43m(\u001B[49m\u001B[43mllm\u001B[49m\u001B[43m)\u001B[49m\n",
243
+ "\u001B[31mTypeError\u001B[39m: get_graph() takes 0 positional arguments but 1 was given"
244
+ ]
245
+ }
246
+ ],
247
+ "execution_count": 25
248
+ },
249
+ {
250
+ "metadata": {
251
+ "ExecuteTime": {
252
+ "end_time": "2025-04-26T20:04:01.351526Z",
253
+ "start_time": "2025-04-26T20:04:01.121488Z"
254
+ }
255
+ },
256
+ "cell_type": "code",
257
+ "source": "graph.invoke({\"messages\": [HumanMessage(content=\"Hello, how are you?\"),]})",
258
+ "id": "cda865fc8fb4cfca",
259
+ "outputs": [
260
+ {
261
+ "name": "stdout",
262
+ "output_type": "stream",
263
+ "text": [
264
+ "---Node 1---\n"
265
+ ]
266
+ },
267
+ {
268
+ "ename": "ValidationError",
269
+ "evalue": "5 validation errors for SystemMessage\ncontent.str\n Input should be a valid string [type=string_type, input_value=[SystemMessage(content='#...2f-a43a-f6e575615870')]], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].0.str\n Input should be a valid string [type=string_type, input_value=SystemMessage(content='# ...}, response_metadata={}), input_type=SystemMessage]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].0.dict[any,any]\n Input should be a valid dictionary [type=dict_type, input_value=SystemMessage(content='# ...}, response_metadata={}), input_type=SystemMessage]\n For further information visit https://errors.pydantic.dev/2.11/v/dict_type\ncontent.list[union[str,dict[any,any]]].1.str\n Input should be a valid string [type=string_type, input_value=[HumanMessage(content='He...82f-a43a-f6e575615870')], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].1.dict[any,any]\n Input should be a valid dictionary [type=dict_type, input_value=[HumanMessage(content='He...82f-a43a-f6e575615870')], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/dict_type",
270
+ "output_type": "error",
271
+ "traceback": [
272
+ "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
273
+ "\u001B[31mValidationError\u001B[39m Traceback (most recent call last)",
274
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[5]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m \u001B[43mgraph\u001B[49m\u001B[43m.\u001B[49m\u001B[43minvoke\u001B[49m\u001B[43m(\u001B[49m\u001B[43m{\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mmessages\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\u001B[43m \u001B[49m\u001B[43m[\u001B[49m\u001B[43mHumanMessage\u001B[49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mHello, how are you?\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\u001B[43m]\u001B[49m\u001B[43m}\u001B[49m\u001B[43m)\u001B[49m\n",
275
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langgraph/pregel/__init__.py:2795\u001B[39m, in \u001B[36mPregel.invoke\u001B[39m\u001B[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, **kwargs)\u001B[39m\n\u001B[32m 2793\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m 2794\u001B[39m chunks = []\n\u001B[32m-> \u001B[39m\u001B[32m2795\u001B[39m \u001B[43m\u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mchunk\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mstream\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 2796\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43minput\u001B[39;49m\u001B[43m,\u001B[49m\n\u001B[32m 2797\u001B[39m \u001B[43m \u001B[49m\u001B[43mconfig\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2798\u001B[39m \u001B[43m \u001B[49m\u001B[43mstream_mode\u001B[49m\u001B[43m=\u001B[49m\u001B[43mstream_mode\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2799\u001B[39m \u001B[43m \u001B[49m\u001B[43moutput_keys\u001B[49m\u001B[43m=\u001B[49m\u001B[43moutput_keys\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2800\u001B[39m \u001B[43m \u001B[49m\u001B[43minterrupt_before\u001B[49m\u001B[43m=\u001B[49m\u001B[43minterrupt_before\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2801\u001B[39m \u001B[43m \u001B[49m\u001B[43minterrupt_after\u001B[49m\u001B[43m=\u001B[49m\u001B[43minterrupt_after\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2802\u001B[39m \u001B[43m \u001B[49m\u001B[43mcheckpoint_during\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcheckpoint_during\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2803\u001B[39m \u001B[43m \u001B[49m\u001B[43mdebug\u001B[49m\u001B[43m=\u001B[49m\u001B[43mdebug\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2804\u001B[39m \u001B[43m 
\u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2805\u001B[39m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\u001B[43m:\u001B[49m\n\u001B[32m 2806\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mif\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mstream_mode\u001B[49m\u001B[43m \u001B[49m\u001B[43m==\u001B[49m\u001B[43m \u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mvalues\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m:\u001B[49m\n\u001B[32m 2807\u001B[39m \u001B[43m \u001B[49m\u001B[43mlatest\u001B[49m\u001B[43m \u001B[49m\u001B[43m=\u001B[49m\u001B[43m \u001B[49m\u001B[43mchunk\u001B[49m\n",
276
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langgraph/pregel/__init__.py:2433\u001B[39m, in \u001B[36mPregel.stream\u001B[39m\u001B[34m(self, input, config, stream_mode, output_keys, interrupt_before, interrupt_after, checkpoint_during, debug, subgraphs)\u001B[39m\n\u001B[32m 2427\u001B[39m \u001B[38;5;66;03m# Similarly to Bulk Synchronous Parallel / Pregel model\u001B[39;00m\n\u001B[32m 2428\u001B[39m \u001B[38;5;66;03m# computation proceeds in steps, while there are channel updates.\u001B[39;00m\n\u001B[32m 2429\u001B[39m \u001B[38;5;66;03m# Channel updates from step N are only visible in step N+1\u001B[39;00m\n\u001B[32m 2430\u001B[39m \u001B[38;5;66;03m# channels are guaranteed to be immutable for the duration of the step,\u001B[39;00m\n\u001B[32m 2431\u001B[39m \u001B[38;5;66;03m# with channel updates applied only at the transition between steps.\u001B[39;00m\n\u001B[32m 2432\u001B[39m \u001B[38;5;28;01mwhile\u001B[39;00m loop.tick(input_keys=\u001B[38;5;28mself\u001B[39m.input_channels):\n\u001B[32m-> \u001B[39m\u001B[32m2433\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43m_\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mrunner\u001B[49m\u001B[43m.\u001B[49m\u001B[43mtick\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 2434\u001B[39m \u001B[43m \u001B[49m\u001B[43mloop\u001B[49m\u001B[43m.\u001B[49m\u001B[43mtasks\u001B[49m\u001B[43m.\u001B[49m\u001B[43mvalues\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2435\u001B[39m \u001B[43m \u001B[49m\u001B[43mtimeout\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mstep_timeout\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2436\u001B[39m \u001B[43m 
\u001B[49m\u001B[43mretry_policy\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mretry_policy\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2437\u001B[39m \u001B[43m \u001B[49m\u001B[43mget_waiter\u001B[49m\u001B[43m=\u001B[49m\u001B[43mget_waiter\u001B[49m\u001B[43m,\u001B[49m\n\u001B[32m 2438\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m\u001B[43m:\u001B[49m\n\u001B[32m 2439\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;66;43;03m# emit output\u001B[39;49;00m\n\u001B[32m 2440\u001B[39m \u001B[43m \u001B[49m\u001B[38;5;28;43;01myield from\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43moutput\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 2441\u001B[39m \u001B[38;5;66;03m# emit output\u001B[39;00m\n",
277
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment/agent.py:31\u001B[39m, in \u001B[36mget_graph.<locals>.node_1\u001B[39m\u001B[34m(state)\u001B[39m\n\u001B[32m 27\u001B[39m messages = state[\u001B[33m'\u001B[39m\u001B[33mmessages\u001B[39m\u001B[33m'\u001B[39m]\n\u001B[32m 29\u001B[39m prompt: AnyMessage = [SystemMessage(content=system_prompt), messages]\n\u001B[32m---> \u001B[39m\u001B[32m31\u001B[39m response = \u001B[43mllm\u001B[49m\u001B[43m.\u001B[49m\u001B[43minvoke\u001B[49m\u001B[43m(\u001B[49m\u001B[43m[\u001B[49m\u001B[43m(\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43msystem\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mprompt\u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mmessages\u001B[49m\u001B[43m]\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 33\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m {\u001B[33m\"\u001B[39m\u001B[33mmessage\u001B[39m\u001B[33m\"\u001B[39m: response}\n",
278
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:370\u001B[39m, in \u001B[36mBaseChatModel.invoke\u001B[39m\u001B[34m(self, input, config, stop, **kwargs)\u001B[39m\n\u001B[32m 357\u001B[39m \u001B[38;5;129m@override\u001B[39m\n\u001B[32m 358\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34minvoke\u001B[39m(\n\u001B[32m 359\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m (...)\u001B[39m\u001B[32m 364\u001B[39m **kwargs: Any,\n\u001B[32m 365\u001B[39m ) -> BaseMessage:\n\u001B[32m 366\u001B[39m config = ensure_config(config)\n\u001B[32m 367\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m cast(\n\u001B[32m 368\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mChatGeneration\u001B[39m\u001B[33m\"\u001B[39m,\n\u001B[32m 369\u001B[39m \u001B[38;5;28mself\u001B[39m.generate_prompt(\n\u001B[32m--> \u001B[39m\u001B[32m370\u001B[39m [\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_convert_input\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43minput\u001B[39;49m\u001B[43m)\u001B[49m],\n\u001B[32m 371\u001B[39m stop=stop,\n\u001B[32m 372\u001B[39m callbacks=config.get(\u001B[33m\"\u001B[39m\u001B[33mcallbacks\u001B[39m\u001B[33m\"\u001B[39m),\n\u001B[32m 373\u001B[39m tags=config.get(\u001B[33m\"\u001B[39m\u001B[33mtags\u001B[39m\u001B[33m\"\u001B[39m),\n\u001B[32m 374\u001B[39m metadata=config.get(\u001B[33m\"\u001B[39m\u001B[33mmetadata\u001B[39m\u001B[33m\"\u001B[39m),\n\u001B[32m 375\u001B[39m run_name=config.get(\u001B[33m\"\u001B[39m\u001B[33mrun_name\u001B[39m\u001B[33m\"\u001B[39m),\n\u001B[32m 376\u001B[39m run_id=config.pop(\u001B[33m\"\u001B[39m\u001B[33mrun_id\u001B[39m\u001B[33m\"\u001B[39m, \u001B[38;5;28;01mNone\u001B[39;00m),\n\u001B[32m 377\u001B[39m **kwargs,\n\u001B[32m 378\u001B[39m ).generations[\u001B[32m0\u001B[39m][\u001B[32m0\u001B[39m],\n\u001B[32m 379\u001B[39m ).message\n",
279
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:350\u001B[39m, in \u001B[36mBaseChatModel._convert_input\u001B[39m\u001B[34m(self, input)\u001B[39m\n\u001B[32m 348\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m StringPromptValue(text=\u001B[38;5;28minput\u001B[39m)\n\u001B[32m 349\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(\u001B[38;5;28minput\u001B[39m, Sequence):\n\u001B[32m--> \u001B[39m\u001B[32m350\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m ChatPromptValue(messages=\u001B[43mconvert_to_messages\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43minput\u001B[39;49m\u001B[43m)\u001B[49m)\n\u001B[32m 351\u001B[39m msg = (\n\u001B[32m 352\u001B[39m \u001B[33mf\u001B[39m\u001B[33m\"\u001B[39m\u001B[33mInvalid input type \u001B[39m\u001B[38;5;132;01m{\u001B[39;00m\u001B[38;5;28mtype\u001B[39m(\u001B[38;5;28minput\u001B[39m)\u001B[38;5;132;01m}\u001B[39;00m\u001B[33m. \u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 353\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mMust be a PromptValue, str, or list of BaseMessages.\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 354\u001B[39m )\n\u001B[32m 355\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mValueError\u001B[39;00m(msg)\n",
280
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/utils.py:367\u001B[39m, in \u001B[36mconvert_to_messages\u001B[39m\u001B[34m(messages)\u001B[39m\n\u001B[32m 365\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(messages, PromptValue):\n\u001B[32m 366\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m messages.to_messages()\n\u001B[32m--> \u001B[39m\u001B[32m367\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43m[\u001B[49m\u001B[43m_convert_to_message\u001B[49m\u001B[43m(\u001B[49m\u001B[43mm\u001B[49m\u001B[43m)\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mm\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mmessages\u001B[49m\u001B[43m]\u001B[49m\n",
281
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/utils.py:367\u001B[39m, in \u001B[36m<listcomp>\u001B[39m\u001B[34m(.0)\u001B[39m\n\u001B[32m 365\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(messages, PromptValue):\n\u001B[32m 366\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m messages.to_messages()\n\u001B[32m--> \u001B[39m\u001B[32m367\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m [\u001B[43m_convert_to_message\u001B[49m\u001B[43m(\u001B[49m\u001B[43mm\u001B[49m\u001B[43m)\u001B[49m \u001B[38;5;28;01mfor\u001B[39;00m m \u001B[38;5;129;01min\u001B[39;00m messages]\n",
282
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/utils.py:324\u001B[39m, in \u001B[36m_convert_to_message\u001B[39m\u001B[34m(message)\u001B[39m\n\u001B[32m 321\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(message, Sequence) \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28mlen\u001B[39m(message) == \u001B[32m2\u001B[39m:\n\u001B[32m 322\u001B[39m \u001B[38;5;66;03m# mypy doesn't realise this can't be a string given the previous branch\u001B[39;00m\n\u001B[32m 323\u001B[39m message_type_str, template = message \u001B[38;5;66;03m# type: ignore[misc]\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m324\u001B[39m _message = \u001B[43m_create_message_from_message_type\u001B[49m\u001B[43m(\u001B[49m\u001B[43mmessage_type_str\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtemplate\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 325\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(message, \u001B[38;5;28mdict\u001B[39m):\n\u001B[32m 326\u001B[39m msg_kwargs = message.copy()\n",
283
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/utils.py:278\u001B[39m, in \u001B[36m_create_message_from_message_type\u001B[39m\u001B[34m(message_type, content, name, tool_call_id, tool_calls, id, **additional_kwargs)\u001B[39m\n\u001B[32m 276\u001B[39m kwargs[\u001B[33m\"\u001B[39m\u001B[33madditional_kwargs\u001B[39m\u001B[33m\"\u001B[39m] = kwargs.get(\u001B[33m\"\u001B[39m\u001B[33madditional_kwargs\u001B[39m\u001B[33m\"\u001B[39m) \u001B[38;5;129;01mor\u001B[39;00m {}\n\u001B[32m 277\u001B[39m kwargs[\u001B[33m\"\u001B[39m\u001B[33madditional_kwargs\u001B[39m\u001B[33m\"\u001B[39m][\u001B[33m\"\u001B[39m\u001B[33m__openai_role__\u001B[39m\u001B[33m\"\u001B[39m] = \u001B[33m\"\u001B[39m\u001B[33mdeveloper\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m--> \u001B[39m\u001B[32m278\u001B[39m message = \u001B[43mSystemMessage\u001B[49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 279\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m message_type == \u001B[33m\"\u001B[39m\u001B[33mfunction\u001B[39m\u001B[33m\"\u001B[39m:\n\u001B[32m 280\u001B[39m message = FunctionMessage(content=content, **kwargs)\n",
284
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/system.py:46\u001B[39m, in \u001B[36mSystemMessage.__init__\u001B[39m\u001B[34m(self, content, **kwargs)\u001B[39m\n\u001B[32m 37\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__init__\u001B[39m(\n\u001B[32m 38\u001B[39m \u001B[38;5;28mself\u001B[39m, content: Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mlist\u001B[39m[Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mdict\u001B[39m]]], **kwargs: Any\n\u001B[32m 39\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 40\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Pass in content as positional arg.\u001B[39;00m\n\u001B[32m 41\u001B[39m \n\u001B[32m 42\u001B[39m \u001B[33;03m Args:\u001B[39;00m\n\u001B[32m 43\u001B[39m \u001B[33;03m content: The string contents of the message.\u001B[39;00m\n\u001B[32m 44\u001B[39m \u001B[33;03m kwargs: Additional fields to pass to the message.\u001B[39;00m\n\u001B[32m 45\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m46\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[34;43m__init__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
285
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/messages/base.py:78\u001B[39m, in \u001B[36mBaseMessage.__init__\u001B[39m\u001B[34m(self, content, **kwargs)\u001B[39m\n\u001B[32m 70\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__init__\u001B[39m(\n\u001B[32m 71\u001B[39m \u001B[38;5;28mself\u001B[39m, content: Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mlist\u001B[39m[Union[\u001B[38;5;28mstr\u001B[39m, \u001B[38;5;28mdict\u001B[39m]]], **kwargs: Any\n\u001B[32m 72\u001B[39m ) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 73\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Pass in content as positional arg.\u001B[39;00m\n\u001B[32m 74\u001B[39m \n\u001B[32m 75\u001B[39m \u001B[33;03m Args:\u001B[39;00m\n\u001B[32m 76\u001B[39m \u001B[33;03m content: The string contents of the message.\u001B[39;00m\n\u001B[32m 77\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m---> \u001B[39m\u001B[32m78\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[34;43m__init__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcontent\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
286
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/load/serializable.py:130\u001B[39m, in \u001B[36mSerializable.__init__\u001B[39m\u001B[34m(self, *args, **kwargs)\u001B[39m\n\u001B[32m 128\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__init__\u001B[39m(\u001B[38;5;28mself\u001B[39m, *args: Any, **kwargs: Any) -> \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 129\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"\"\"\"\u001B[39;00m \u001B[38;5;66;03m# noqa: D419\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m130\u001B[39m \u001B[38;5;28;43msuper\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m.\u001B[49m\u001B[34;43m__init__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[43m*\u001B[49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
287
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/pydantic/main.py:253\u001B[39m, in \u001B[36mBaseModel.__init__\u001B[39m\u001B[34m(self, **data)\u001B[39m\n\u001B[32m 251\u001B[39m \u001B[38;5;66;03m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001B[39;00m\n\u001B[32m 252\u001B[39m __tracebackhide__ = \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m253\u001B[39m validated_self = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m__pydantic_validator__\u001B[49m\u001B[43m.\u001B[49m\u001B[43mvalidate_python\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdata\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mself_instance\u001B[49m\u001B[43m=\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[32m 254\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mself\u001B[39m \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m validated_self:\n\u001B[32m 255\u001B[39m warnings.warn(\n\u001B[32m 256\u001B[39m \u001B[33m'\u001B[39m\u001B[33mA custom validator is returning a value other than `self`.\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[33m'\u001B[39m\n\u001B[32m 257\u001B[39m \u001B[33m\"\u001B[39m\u001B[33mReturning anything other than `self` from a top level model validator isn\u001B[39m\u001B[33m'\u001B[39m\u001B[33mt supported when validating via `__init__`.\u001B[39m\u001B[38;5;130;01m\\n\u001B[39;00m\u001B[33m\"\u001B[39m\n\u001B[32m 258\u001B[39m \u001B[33m'\u001B[39m\u001B[33mSee the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.\u001B[39m\u001B[33m'\u001B[39m,\n\u001B[32m 259\u001B[39m stacklevel=\u001B[32m2\u001B[39m,\n\u001B[32m 260\u001B[39m )\n",
288
+ "\u001B[31mValidationError\u001B[39m: 5 validation errors for SystemMessage\ncontent.str\n Input should be a valid string [type=string_type, input_value=[SystemMessage(content='#...2f-a43a-f6e575615870')]], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].0.str\n Input should be a valid string [type=string_type, input_value=SystemMessage(content='# ...}, response_metadata={}), input_type=SystemMessage]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].0.dict[any,any]\n Input should be a valid dictionary [type=dict_type, input_value=SystemMessage(content='# ...}, response_metadata={}), input_type=SystemMessage]\n For further information visit https://errors.pydantic.dev/2.11/v/dict_type\ncontent.list[union[str,dict[any,any]]].1.str\n Input should be a valid string [type=string_type, input_value=[HumanMessage(content='He...82f-a43a-f6e575615870')], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/string_type\ncontent.list[union[str,dict[any,any]]].1.dict[any,any]\n Input should be a valid dictionary [type=dict_type, input_value=[HumanMessage(content='He...82f-a43a-f6e575615870')], input_type=list]\n For further information visit https://errors.pydantic.dev/2.11/v/dict_type",
289
+ "During task with name 'node_1' and id '6ba0bd4f-d418-d63a-2d13-68271c2e3a1d'"
290
+ ]
291
+ }
292
+ ],
293
+ "execution_count": 5
294
+ },
295
+ {
296
+ "metadata": {},
297
+ "cell_type": "code",
298
+ "outputs": [],
299
+ "execution_count": null,
300
+ "source": "",
301
+ "id": "c14efd7dc261e7c7"
302
+ }
303
+ ],
304
+ "metadata": {
305
+ "kernelspec": {
306
+ "display_name": "Python 3",
307
+ "language": "python",
308
+ "name": "python3"
309
+ },
310
+ "language_info": {
311
+ "codemirror_mode": {
312
+ "name": "ipython",
313
+ "version": 2
314
+ },
315
+ "file_extension": ".py",
316
+ "mimetype": "text/x-python",
317
+ "name": "python",
318
+ "nbconvert_exporter": "python",
319
+ "pygments_lexer": "ipython2",
320
+ "version": "2.7.6"
321
+ }
322
+ },
323
+ "nbformat": 4,
324
+ "nbformat_minor": 5
325
+ }
playground.ipynb CHANGED
@@ -12,8 +12,8 @@
12
  "metadata": {
13
  "collapsed": true,
14
  "ExecuteTime": {
15
- "end_time": "2025-04-25T10:03:00.260362Z",
16
- "start_time": "2025-04-25T10:03:00.250631Z"
17
  }
18
  },
19
  "cell_type": "code",
@@ -25,13 +25,13 @@
25
  ],
26
  "id": "initial_id",
27
  "outputs": [],
28
- "execution_count": 1
29
  },
30
  {
31
  "metadata": {
32
  "ExecuteTime": {
33
- "end_time": "2025-04-25T10:03:00.271894Z",
34
- "start_time": "2025-04-25T10:03:00.266382Z"
35
  }
36
  },
37
  "cell_type": "code",
@@ -50,13 +50,13 @@
50
  ],
51
  "id": "69149c677dbbd143",
52
  "outputs": [],
53
- "execution_count": 2
54
  },
55
  {
56
  "metadata": {
57
  "ExecuteTime": {
58
- "end_time": "2025-04-25T10:03:00.555328Z",
59
- "start_time": "2025-04-25T10:03:00.546416Z"
60
  }
61
  },
62
  "cell_type": "code",
@@ -80,13 +80,13 @@
80
  ],
81
  "id": "fdda0402eb744e22",
82
  "outputs": [],
83
- "execution_count": 3
84
  },
85
  {
86
  "metadata": {
87
  "ExecuteTime": {
88
- "end_time": "2025-04-25T10:04:10.031794Z",
89
- "start_time": "2025-04-25T10:04:09.998199Z"
90
  }
91
  },
92
  "cell_type": "code",
@@ -111,45 +111,149 @@
111
  "graph = builder.compile()"
112
  ],
113
  "id": "c36263817c0e369f",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
114
  "outputs": [
115
  {
116
- "ename": "ModuleNotFoundError",
117
- "evalue": "No module named 'langgraph'",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
118
  "output_type": "error",
119
  "traceback": [
120
  "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
121
- "\u001B[31mModuleNotFoundError\u001B[39m Traceback (most recent call last)",
122
- "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[7]\u001B[39m\u001B[32m, line 3\u001B[39m\n\u001B[32m 1\u001B[39m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01mIPython\u001B[39;00m\u001B[34;01m.\u001B[39;00m\u001B[34;01mdisplay\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m Image, display\n\u001B[32m----> \u001B[39m\u001B[32m3\u001B[39m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01mlanggraph\u001B[39;00m\u001B[34;01m.\u001B[39;00m\u001B[34;01mgraph\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m MessagesState, START, END, StateGraph\n\u001B[32m 5\u001B[39m \u001B[38;5;66;03m# Build graph\u001B[39;00m\n\u001B[32m 6\u001B[39m builder = StateGraph(State)\n",
123
- "\u001B[31mModuleNotFoundError\u001B[39m: No module named 'langgraph'"
 
124
  ]
125
  }
126
  ],
127
- "execution_count": 7
128
  },
129
  {
130
  "metadata": {
131
  "ExecuteTime": {
132
- "end_time": "2025-04-25T10:03:01.259149Z",
133
- "start_time": "2025-04-25T10:03:01.231005Z"
134
  }
135
  },
136
  "cell_type": "code",
137
- "source": "display(Image(graph.get_graph().draw_mermaid_png()))",
138
- "id": "d7410440dcfac3d0",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
139
  "outputs": [
140
  {
141
  "ename": "NameError",
142
- "evalue": "name 'graph' is not defined",
143
  "output_type": "error",
144
  "traceback": [
145
  "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
146
  "\u001B[31mNameError\u001B[39m Traceback (most recent call last)",
147
- "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[5]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m display(Image(\u001B[43mgraph\u001B[49m.get_graph().draw_mermaid_png()))\n",
148
- "\u001B[31mNameError\u001B[39m: name 'graph' is not defined"
149
  ]
150
  }
151
  ],
152
- "execution_count": 5
153
  },
154
  {
155
  "metadata": {},
@@ -157,7 +261,7 @@
157
  "outputs": [],
158
  "execution_count": null,
159
  "source": "",
160
- "id": "a027a884dcb0979b"
161
  }
162
  ],
163
  "metadata": {
 
12
  "metadata": {
13
  "collapsed": true,
14
  "ExecuteTime": {
15
+ "end_time": "2025-04-26T19:28:53.649925Z",
16
+ "start_time": "2025-04-26T19:28:53.644536Z"
17
  }
18
  },
19
  "cell_type": "code",
 
25
  ],
26
  "id": "initial_id",
27
  "outputs": [],
28
+ "execution_count": 2
29
  },
30
  {
31
  "metadata": {
32
  "ExecuteTime": {
33
+ "end_time": "2025-04-26T19:28:53.782161Z",
34
+ "start_time": "2025-04-26T19:28:53.778525Z"
35
  }
36
  },
37
  "cell_type": "code",
 
50
  ],
51
  "id": "69149c677dbbd143",
52
  "outputs": [],
53
+ "execution_count": 3
54
  },
55
  {
56
  "metadata": {
57
  "ExecuteTime": {
58
+ "end_time": "2025-04-26T19:28:53.954984Z",
59
+ "start_time": "2025-04-26T19:28:53.949523Z"
60
  }
61
  },
62
  "cell_type": "code",
 
80
  ],
81
  "id": "fdda0402eb744e22",
82
  "outputs": [],
83
+ "execution_count": 4
84
  },
85
  {
86
  "metadata": {
87
  "ExecuteTime": {
88
+ "end_time": "2025-04-26T19:28:54.423804Z",
89
+ "start_time": "2025-04-26T19:28:54.098033Z"
90
  }
91
  },
92
  "cell_type": "code",
 
111
  "graph = builder.compile()"
112
  ],
113
  "id": "c36263817c0e369f",
114
+ "outputs": [],
115
+ "execution_count": 5
116
+ },
117
+ {
118
+ "metadata": {
119
+ "ExecuteTime": {
120
+ "end_time": "2025-04-26T19:28:55.442847Z",
121
+ "start_time": "2025-04-26T19:28:55.271978Z"
122
+ }
123
+ },
124
+ "cell_type": "code",
125
+ "source": [
126
+ "from IPython.display import Image, display\n",
127
+ "\n",
128
+ "display(Image(graph.get_graph().draw_mermaid_png()))"
129
+ ],
130
+ "id": "d7410440dcfac3d0",
131
  "outputs": [
132
  {
133
+ "data": {
134
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAOkAAAFNCAIAAABqr9/4AAAQAElEQVR4nOydB1wUR/vH5xrHFY6j9yKKgF3s3cSGihpREnvvJnZjjVH/vjGxR40ttsTuq7FiEkti775gxQYiSOco1zv/By+5lyQH4iuHu7Pz/dznPrszt3t3O7995plndma4xcXFiECgIVxEINATol0CXSHaJdAVol0CXSHaJdAVol0CXWG0dk3G4uxUrVphUiuMZmOxXkeDcCFfwObwWCInrtCJ4xXkiBgMi4HxXZ3W9OS24sUDVfpzjXeQo0DMETpxnT14eo0ZUR4HAbsgS69SGDlc1stH6mp1RCF1RKGRToh5ME6710/JUh6pfIIFUOqB4UJEZwx6M9yB8HdSH6tb9nCv1UyCmASDtPssQXFmd3aTzq7wQnihUZqunsjLS9d1HuLt4umAmAFTtHvtpEyrNrWN8YCqFmFKUZ7hxJaM5t3cajQQIwbACO1ePZnn4Mhu3BE3c2uTn3dm1mnpHFCT3u5QRWAj3PnlhyyeA4shwgW6DvO5f7no3qVChDuYa/f2mXxnd16Tzm6ISXQb4fM8QZmepEFYg7N2XyaqVHJTi+7MEq6FmM/875wp0KqMCF9w1u7Fn/Lqt3VGTCU0Unz5mAzhC7bafXi9yK+6QOrBlIDRP4loKsl6qS3I1iNMwVa7SXeVrXox0VsoTdveHvcuFyFMwVO7GUkag76YL+AgZgMdhxBzwDUMiqd2kx+ooJcfVS2zZs06ceIEens6duyYkZGB7AN0fUO/McIRPLWbn6ULqVvVfUuJiYno7cnKyiostGMstkYDUUYynsEyPPvV1k99PnFVdRbLLt2/R48e3bt3b3p6uqOjY2Rk5IwZM7y8vBo3bmzJFYvF58+fN5lM33///S+//JKTk+Ps7NyuXbvJkycLBAL02jzDDwsODt69e/eIESM2bNhgORA+s3LlSlTZgPt07ZSsz2f+CDswfH5XrTAKxBw7CTc+Pn7JkiXz5s1r0qQJ2Mtvv/129uzZO3bsOHXqVLdu3WbOnBkVFQUfA3Hv3Llz8eLF4eHh4A8sWrSIy+WCyiGLx+M9fvxYq9WuXbs2MDAwICBgzpw5oGPYQHZAKOGo5SaEIxhqV1VkEjnbq5WWlJTE5/N79OgBWvT39//6668zMzMhHYwrvAuFQstG165dW7RoUaNGDdgGgXbu3PnKlSvWk7x69Wrbtm2WT4pEJX65RCKxbFQ6ImeuqgjPHgoMtWsyFzsK7aVd8A3Aoo8aNapXr17NmjXz9fV1c7MRiZNKpXFxcWChwWcwGo1qtRpkbc0NCgqyCLcKYHNYfCEbPEM7VUTvEQzbamIJtyDHXgF58FPBQwCLu27dup49ew4bNuzBgwf//Njy5cu3bt368ccfg9cL/kPv3r1L54JPjKoKMLpsNgs/4SIstWtvDy80NBQM6pkzZzZv3szhcKZMmaLX/+VWgYbasWPHhg4dCh6wn5+fu7u7UqlE7wm4FHBBEI5gqF0wMxCTV8kNyA6Alb137x5sgGobNWo0fvx4aLHJZH88NmAJ2pjNZpCv1StQqVQXL14sP55jv2iPRmXyDsZzSCae8V2xlPvigRrZgatXr06bNu3cuXPQ3nry5Mn+/ft9fHy8vb35r/nPf/4DiVBBh4WFnTx5Ej7z7NkzMMytWrWSy+UpKSng+/7thNBKg/fLly8nJycjO/AsXuHpT7RLH+zXmQQRWXBe16xZ07dv34kTJ4K9hFCXxZsE3/fs2bMTJkzQaDQLFiwA0wv+LsS/+vXrB58EfQ8ZMgSabn87YURERMuWLVevXr1s2TJkB1IeqoNr4zmGAs++CfhTP61Pj/nUD8s2SsXJTNE8vCrvOMAL4QiedhckGxgmvPFzPmI210/mRzTFduA7tvPiNOnsunlWUmQHFwe+7fsT+gv+Fh+wAHU9tMPKOi0EEOwUmk1ISADP2GYW/E4HB9sPIler
Vg1idjazXiaqODyWXw0BwhScxwkn3pArCg1Nu9h+ilehUNhMh+YUaLcsZwNCs3byQ+B7wVG2maXT6UC7Nr+XzWaX1SF3eldWow4ubr58hCmYj3E/uy/bL0QQwbAJY4Bz+7J9QgR4z5SD+Tjhjv297l0uSn2C5wOsZXHtZB7PkY39FE+MmFvk2Kb0eq2l1ar8afT3wvVTMkcxp0FbKcId/OcWAXqN83t4vSj+fAHCnVPbM8ErZoJwEaPm0rt1Ov/xLUXLHm7V62E4XVfC+cI75wrax3pg+e9swqw5TAtz9VdPlDx7ANFfcCFEzrQPEcoydCmPVAkXimo2Erfs7sbhMaIitcDEuaOzXmoTb8qh01gk4XoF8oUSrkjCEUt5JhMNLgWHzSrK16uKTGZz8fMEJY/PDqkrqtfaWejEuCnsmahdKzlp2uw0rbrIpJKb2BxW5Y4vgA6Fx48f16tXD1UqEhceqFbkDDcb1zdEIHHjIabCaO3alZycnKFDh/7888+IYB/IOj8EukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0ay9YLJavry8i2A2iXXtRXFyckZGBCHaDaJdAV4h2CXSFaJdAV4h2CXSFaJdAV4h2CXSFaJdAV4h2CXSFaJdAV4h2CXSFaJdAV4h2CXSFaJdAV4h2CXSFaJdAV8jagJXM0KFD8/PzYcNsNufm5np7e8MVNhqNZJHASodBSydXDbGxsTKZLDMzMzs7G+SbkZEB22w2uc6VD7mmlUx0dHRQUFDpFLC7jRo1QoTKhmi38hkwYACfz7fuenl5DR48GBEqG6LdyqdHjx6BgYGWbTC6TZs2DQ0NRYTKhmjXLkCLTSQSIWJ07QnRrl2IiooKCAiwGN3q1asjgh1gUHxXrTDKMvQGQxXFBHt3GcfWHuvSZkjyAxWqEgQitrsvn8dnij1iRHxXozL9tj8nM0UbFC7SKE0IU0xGc/ZLbY0G4o4DvBADwF+7YG6PrM9o1dvTzccRMYBn8fLUREWvcb4sFgthDf7a3TI3OWZSEF/AQYwh5ZEi5b6ixxjMZ5TC3De6cy6/fjsXRgkXCK7l5CDgpD6pIj/7fYG5drNSdGIpDzEPHp+Tl6FHWIO5dk36YicXB8Q8pJ4OWgW2rVILmMfI1CojMx+UMxmKqywa+L4gz+8S6ArRLoGuEO0S6ArRLoGuEO0S6ArRLoGuEO0S6ArRLoGuEO0S6ArRLoGuEO0S6AoZr1b5FBUVftCh8fkLZ9G7AWeI7tlu/oLpiGALYnepiMFg2Lhp9Zmzp8RiJ0QoA2J3qUhS8rOEu3c2btgVGBCMCGVA7O5fePnyxbARsatWbjr807779xPYbPYH7TtNnDCdwykZeQEp329b//RpIovFigivM3r0ZxHhtS0HHj9xeM/e7YWFBaGh4aNGTCx9zqfPHm/duv7J00Sj0RDZsCmczdvbp/yf4evr/926nQKBABHKhtjdv8DhltzM321Y2f+ToceOnJs/719Hjh68eOk3SExLeznj8wke7p6gqvVrdwiEwhkzx+fkZEPWvXvxq9csbde249Yt+wYNHAnVvfWE2dlZ06aPZbHZq1duXrlik1xRNH3meL3+DSMaJE4SItw3QrRrA1Bh7dr1YKNRZFNfH78nTx7B9rHjhwQC4ZzZi6tXD4XXvDlLjEbjr6dPQtbpM3Gurm5jx0wKCAhq3qxVbOwg66mOnzgERhrugZCQGuFhtebO/r/MzPQLF88hwjtDtGuD6iH/nT4MWktKpQKVVP2JNUPDudw/vCyhUAhKTUp6CtsvU1/UrBlh8SuAiIg61sMTEx+Eh9V2+rPJ5eXl7ePj9/z5E0R4Z4i/awOHUrM4otfz4cG7Wq1yc3UvnS4UiiDxn1kCx/9W9yqV8tnzJ52j
WlhTIIYgy89DhHeGaLeiiERiEGLpFNi1SNbRUVA6y2KnrUfVrdtg+tR5pQ8E3wMR3hniM1SUsJq1IFYAVtOyq1AqUlNTwl/HGQL8gyCqZTabLVm379ywHgX+Q3p6GsQNAgODLS9wf93c3BHhnSHarSi9esXqdNplKxZDwCE5+fmSf80Dm9qlczRkdegQVVCQ/93GVZAOQYnTrxtwFnpE99Fo1N8sWwiew6tXqT/u2jp85MePHz8s/7vSM17FJ9yGl0Ihh146y7ZcIUeEUhCfoaL4+fov/+a7LVvXjRrTH5pldes0gLCXVOoCWU0aN584Ydr+Az+eOHEY4rvTp88fM3agxUuGUO6qlZu3bFk7afJIOCo4uPqS/1tVq1bd8r8rLu7Ivv0/WHenTR8H7yuWb4C4ByL8Cebzke1fkdqih5erNx8xjMc3i9Ryfbs+HghfiN0l0BWi3fdDj17ty8qa/fmiVq3aIcKbINp9P2zZvLesLBepKyJUAKLd94OPN+aT41YBRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEukK0S6ArRLsEuoK5dqWeDmYmLvOD2ByWUIz5ioiYP3vuwGfnZ2gR88h+qXFyw9wwYa7d4NrCgmzMl3e0iVphCKiJ+ag4zLUbUkfMc0C3TzNrXO65fZl1WzqLJJjbXfzXcQcuH8vTqsweAQJ3f0cOh4UwRas25aVrE28Utu7lXq22COEOI7QLJN1TPk9Q6rVmWWaJC6HT6bhcrnU2EPqi1+vZbLZlxhMnF56rF69+e6mrFyOWUGaKdq3A/7179258fPzw4cMRFsybN++LL75wdHREDINZ2r148WKdOnV4PJ6TE1bz2hoMhps3b4aEhPj4+CDGwKD5GW7cuHHkyBFXV1fMhAvA3dioUaPRo0fn5TGoVcoIu6tQKECv4CrUr18fYU1ycjLcnFKpFDEA/O0uuLajRo2CDeyFC4DbwOfzo6KiCgsLEe4wQrsHDhxAjEEgEOzatevcOfyn+MVWu/n5+StWrICNESNGIIbh4eHRp08f2FiyZAnCF2y1O27cuKFDhyJm07Zt2wULFiBMwbCtdu3atRYtWiDCa1QqlUgkunTpUps2bRBeYGV3jUZjr1693NzcEOFPQLjwnpubu3DhQoQX+NhdCG1CiN5kMvn7+yPCP7BUR3CV3N0xmbkaE7sLXp1cLodeJSLcsrD4UdCzCFEIhAW01y7UG1AezZo1g9AmIryJmJgYmUyGR/SX3j7D4cOHu3fvjkpWK2Hckyjvgk6ng7A3XLQGDRog2kJju3vixIknT544vgYR3gboe4Oaat26dS9evEC0hZZ2Ny0tLSAg4OnTpzVr1kSEdyA5OdnT01MsFiMaQj+7C72dq1atgg0i3HcHGglQazVu3PjVq1eIbtBPuzk5OatXr0aESoLL5d66dev27duIbtBGu+AhLF26FDb69++PCJUKi8X66KOPYGPu3LnWFQ6pD220+9VXX02dOhUR7MngwYMnTZqEaAIN2mpY9sVTnF9+n1ralAAAEABJREFU+SUqKgpRG0rbXa1W27Jly9DQUESoWkQi0ZgxYxC1oa7dhZ53tVrt5eUFwUhEqHIeP34cHh4O8QfKdrNT1O6OHz8ebqrAwEAi3PcFCBfeofNi5cqViJJQUbtxcXHQaPDwwHktXLoALQ13d/esrCxEPajoM8BPgqgNIlAGk8lEwTmEqGh3Dxw4wKh5BijO/fv3L1++jKgHFbV7/PhxmUyGCNQgMTHx+vXriHpQcZrLfv36EWeXOtSrVw8azYh6MG4uPQI2UNFn2L9/P/F3qQP4uxcuXEDUg/i7hDdA/N23gPi7lIL4uwRCJUP8XcIbIP7uW0D8XUpB/N23gPi7lIL4uwRCJUP8XcIbIP7uW0D8XUpB/N23gPi7lIL4u2+mb9++Dg4OludEjUYji8WCbT6fv3XrVkSocgYMGMDlci3rZkJBQHHANpTLoUOHEDWgkN3VaDQp
KSmlU+C+Gjx4MCK8D8Ri8Z07d/42CIBSk21SyN+NjIz8WyXg6+tLtPu+GDFihEQiKZ0CdeAnn3yCKAOFtDtkyBBvb+/SKR06dCAT8L8vmjdvHhERUdqa+Pn5xcTEIMpAIe2GhoY2bNjQerH8/f0HDRqECO8PsCZW0wtNkdjYWEqNI6RWjGzo0KFW09upUydsVkagKaVNL4QaKGV0EdW0azW9cKXgLkeE943F9ILR7dOnD9WGClcozmA0mDXKKpoeMLb3kLt3nnZs31XAc1UUGJH9YbGR2JmKce5ykOcbq6b2rh3WuE54k4KCgi4dPqqa4ig2F0vceBX55Bviu4k35fcuFeVn6QViyg3PryxcvR1y0rRhkU5tYqjeIaIoMFw/lZ90V+lXQyjL1CEckbjyMl5oqtUWNerg4h1c3moM5Wn35un8vAxDg3auTq4Vug/oi1Zlyk7V3DyVO+SLIC6PovNcFebqf1qX/kE/H6mnA2V/ZKUAmizKM1w+ktW6p3tAmLCsj5Wp3Ru/5MtlxubRnogxFObqzu3NHLYgGFEPZaHxwIrUj2cyax2uU9vSWnR3CyxDvrZv34IcfV66jlHCBaQe/NotpXfOFSDqcS1O9kF/X8QwOgzwjf+9zKXgbGsXhFtczMQZwZxcHF49VSPqkXxPKfVwQAyDL+CAWw91js1c29pVFpk8Api4aJmrN5+C0/hB4XlXE/D4OPu4ZREYJsrP1tvMsh0bMujMBi1iIBCgkWVRrv0Od1M+plGFN6IoNBaXEZ6lWVyTQLBCtEugK0S7BLpCtEugK0S7BLpCtEugK0S7BLpCtEugK0S7BLpCtEugK0S7BLpCrcc7ho/8+Nu136B3ICUlee78qR/FdITXnHlTkpOfI8L/yvkLZz/o0LioqBD9r2i12s1b1vYbEN2pS3N437tvp9FYaQOHsLK7eXm5k6eODgwMnv35QrPZ/MOPWz6f/enO7YfEYjEivA+Wr1j8n/hbo0d96u8XeO9+/NZt34F2hwwehSoDrLT76+mTWq3mq3+tcRI7wa6Pj9+IUZ88eJDQvHlrRKhyFErFzZtXP504o0uXaFQyJ1/D58+fXLr0G+W027tPp8EDR2bnZP32+68ajbpu3YYzps13cyuZYEGv12/bvuH386cLCvIhpWOHrsOGjuVyS776/v2Eb9d98/LlC29v31EjJ5Y+YWFhwYZNq+/evQN1VkhIKNy7DRs0Lv839OjRp22bDy3CBTw9S6Z6kMuLEPM4dvzQjp2blv5rzdr1y9PSUiROzoMGjezWtZclN+7U0YP/3p2R8UogEDZr2nL8uKmuriXzD4FR/G7DyrNnfzYXm1s0b9OwYRPrCSFr955tv/1+Ojs708PDK7bvwF49+5b/G6AgThw/XzqF8xpUSVSavwta3Hfgh+DgkH17TmzfevDZs8e7dv8xf+Oab7/++Zfj48ZO2bnj0MgRE48cPQA+EKQrlcp5X0yDy7ppw655c5ccP35IJvtjymio8WfN/uzhw3uzPl+4eePu8LBas+dMeqPzKnGSBAQEWXdv3LzCYrFq1a6HmAcUh0ql/HH31kVfLjtx7Hznzt1Xr1mam5sDWadPx61YuaRzp+7btx5YvHD502eP58ydbBm2CP7oybgjEyZM27xpD1gfawkCmzZ/e+DgroH9h2/begCEu/67FXADVPDHgNcLJXv8xOErVy/ExlbaXEeV2VYLCqzWNaonXDVPT6+mTVo+efIIEsFqnj4TB9XEhx909vP179Sxa0zvfifjfjIYDNdvXFYo5JM++7x69dASdc5aBLuWU92+cwOu6Yzp8yMbNgkKqgb1jpeXz09H9lf8x2RlZa5dtyy6e29/vwDESMBSDug3DMoCbuCuUb1gNynpKaT/+9CeVq3aDRwwHO7zBg0affbpTLjUDx7chSwoqdat2kMhwkUDs9q4UXPLqcDKHDv+708+Hgy1vyWrS+eShlcFf8nsuZP6fhy1dev6GdO/6PBhF1RJ
VKZ2oWa3bjs5SeSvhZiU/MxkMtWKqGvNCgurBTfiq1epL18mOzo6gqm2pHt4eMLLsp2Y+IDH4zWo3+iPX8lm16tb4i2hipGW9nLy1FGhNcJA9IjBWEsEigO9dkBLFJz87G/FAe/Pk56CNUlPTwsPr23NioioY9kA0cOBVikD9es3ApdDra7Q2L5Jn36+7Jv10dEx3yxbCM4MqiQqs63G5/NL71qGfanVKngXCkXWdPCxUMlsu2q1Rs3n/2VUnCXLchRcyi5dW1qz4Aaw+GRv5MnTRPA36tZp8MX8rxwcGDc+sTR/KxFUXKzRasA9KF0cwj+LA7JQyZx5/z2kdHHA+9TpY62D+Sw+Rn6BTCgUojcRElIDXk0aN4cTbty0unu3jyytnXfE7nEGkagkPmX58xYs25DuyHcEn6z0h5VKhfUokN33m/eWzgXri95EamrKzM8nQsU3fdo8qs2fRQUEjgK4jKWLQ1WqOEp2S5VI6eKAd2iThFSrUfpsnh5e5XwXuNfx8bdat/7Aqu8a1WvqdDq4Sazt6XfB7n0TUG2Bhh48vGtNgRYYBFz9/AICA4KhJoLeBEs6NMXy8/9YIgVqLohOgK2FYK3lBfbA3f0N80XA2eYvmN4osunMGV8Q4doEDB4I6P6DBGvKo4f30GvPAYyFt5ePxSe2cOfODcsGFCK4cBAmshaHROLs7Cwtv1oDq7z0my+hfWZNAccaLDfcP6hS/guyM84SZ/D99+zd4evjHxoanpBw2+L1w0WEsCvckdCiGj36M6PB8P229S4urpajQH/grX619IuJE6Z7efuA3Neu/WbgwBFwYDnfBb4UOGEQTUu4e8ea6O7mUTr4QICW/r++mg8xsrZtOmRmpa/7bkX9+pHhr73eDz/sAukQagCH+Pad69YGBtga8FZ3/rAZ9ApmBcJkEEqDSBnE4Mr5orCaEeAnrFu/HMx8teDqT54+2n/gB4jTVYrDgKqmbwIiCeBgrVn7NYRsoZYZNHDkgP7DIB0uxOJFKyDaMmnySAgjgOYOHd5rcaTAan7z9bqNm9d8uehz6G6A6O/gwaMgNFP+F8Un3AJTveDLmaUTe0THTJs6FxH+pGOHKJ1OCxr9fut6cAbAvxo7drIla+iQMRAX2rR5DcQomzdrPWbMpIWLZsE2ZE0YNxUq+i3fr4VoFzQ8WrZoC+HON37Xwi+XQZj5x13fQ5Qdivjj2EH9+w1DlYTt+chu/pqv16L67V0Rw1DLjae2pQ1fWA1RCVWR8eCqtL7TqPWrqoazezIiP5AGRdhoEZLnyAh0hWbanTNvyoNS7YzSdO/We9yfdR+haoAu/bnzp5SVu3vXMWjtILtBM+3OmDZfb7A9O1XpmCWhaqhZM2LLX+OYpamUQFg50Ey7lod7CBQB+j58vN/bzKrE3yXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFdvadXBkmRET11eDP+3uy0cUo7gYufsxcckwQOLCY3NsS9H2uAknF17uSw1iHvmZumJEOcRSbmaKRqcxIeaR8kjp6m17OWvb2vUM4FNvhbyqQJGvDwx78+DBqqdGfXFBDuOWWFPJjZ6BfJHEtndQpt31q+F48XAWYhIZSarnCYoG7aSIerTu5X5uTyZiGGd3pzftXOYAiDLXcQceXit6lqCs387NxcuBw8V5QdCiPH1umibxRlG/mQFsNkVrHLXCuHNRyof9faWeDmWZIjzQqk1QIleOZncb4ePhV2bzozztAi8eqhIuFGa90HK4VVeiJrMZBMSqqsaiuz9fVWSs2VDcrGuF5n94jxj15isn8pLvq0C+uWlV5EKYSxRSzGZVkfGSevDkMkNwbVHjTi7lL//9Bu1a0WnMqKoYNWrU7Nmza9SogaoENhvRbplpsExVtmb3kSNHUlNTJ0+uojEpxWbkKKpQcVS06uELqq50TcVarkNxVX4j7XAUVt3sE2yuqZilp2BxkL4JAl0h2iXQFaJd
Al0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl0h2iXQFaJdAl2honYrOOyeUGVYlhSmGlQcRx4REXH58mVEoAa3b9+uW7cuoh5U1O6CBQtkMtngwYNfvHiBCO+PCxcutGvXrlGjRtHR0Yh6sChbQT969AhE3LZt20mTJiFC1aJWq+Hig6uwePFisViMKAl1556pVavWoUOHnJ2du3TpcuPGDUSoKg4ePAjXvHv37qtWraKscBGV7a6VvLw8sAFubm6LFi1is8lET3YkJSXlyy+/BKsxa9YsRHlooF0Lp06dgssKIu7Rowci2IH169f//vvvYCDq1KmD6ABtzFi3bt1u3bp1586dsWPHZmUxa1Jre3Pz5s2oqCiRSHT48GG6CBfRyO5aAfmC9Y2JiRk5ciQivDNwMXNzc6FN5uHhgWgF/dxHCNnExcXpdLrevXvfu3cPEf5XTp482bhx42bNmm3cuJF2wkV0tLtWUlNTwQMODQ2dO3cuIrwN2dnZcOm8vLzAu0W0hcbN9sDAwB07doSFhbVq1ercuXOIUDG2b98+fPhw8LhoLVxEa+1a6NOnDwj3119/nTJlilwuR4SyefDgAbQTNBoNBG2aNGmCaA6NfYa/cenSJWh2jB49esCAAYjwD5YuXfr48WNokwUFBSEswCfU36ZNGwhPZmZmDhw48Pnz54jwJ1AvtW7dGhoGP/zwAzbCRTjZXStgXaAh0qJFC/AiELNRKBRwKbhcLri2AoEA4QWGXazh4eEHDhyAPuSOHTtevXoVMZX9+/dDH2SvXr2WLVuGn3ARlnbXSkFBAXjATk5O4OSB7UGMISkpCf54gwYNZs6cifAFZ+1agBAEFOScOXM++ugjxAC+/fbbK1euwO0K9Q/CGvwfy7I8Qnn//v1Ro0alp6eXzurUqdOuXbsQPZkxY8bfUq5du9a5c2cXF5eDB+LqLL4AAAdlSURBVA9iL1zEBLtrJT4+Hhou0dHRY8aMsaRERkb6+/tv3rzZx8cH0QqIqCxZskSpVFqebDYajfDXioqKoE0Gjj5iBgx6HLZhw4bHjx+He7Vnz54JCQnt27dns9lgidesWYPoxrp16woLC00mU9euXY8dOwY9ixAiXL9+PXOEixg4xn3s2LFgemNjY/V6PeyyWKxbt25dvHixbdu2iCZs3LgRbjnLWtg5OTl3795l5rgSJg5D8PPzg0rWugtVLVgsRBOSk5Pj4uLA4lp2QcFnzpxBjISJ2u3WrVvpCQeg+F+9erVlyxZEB8DDgb7D0ikajQb+EWIeTNQuCFcoFKLXk5jANryD//DTTz+lpaUhagPxPvDUrbvwy8FlF4lEFv+HaTAozlCa8+fP579Glq3QyKSOyIeHpG5SH4ETryBbiyiJkwtPlptvKFbrUJ7GnCrx0Xh4ukPjTCqVQkMNMQ+Gahd4dEOecL5IUWgUuwvFbgIOj8114PD4HPS6DURFzMUGncmgM5pNZnm2Sp6jDqoljmzv7Fsdw/7eisBE7SbfV108kscTOLgGOAuc+Yi2KGWavJQCsTOnfR9Xd19HxDCYpV1oncftyC6SmTxCXBzFDggLFLlqeZYipK6wRVcpYhLM0u7eb9IcXcWu/hKEHZmP81zdWV0GeyLGwCDt7l+V7uQjFUmxrVtzXxR4eLPbx7giZsCUGNmer1MlvjgLF/Co5pKXYz63PxcxA0Zo9+ed2SIPidAZ/9aMe5BLTobp3pUixADw1+7TeIW8qFjq64SYgU+ER8J5uaLAgHAHf+1ePipzCWBWA1ziLbl0VIZwB3Pt3r1UKJAKHAQ8xCSkvuKslzpZpg5hDebafXBF4RpI3YjY8nX9fzqxHNkBlwDn+POYe704azc/W6/VmPlCTPog3gonD2HSXSXCGpy1m3xPKXYTIkbC5XEcxbyMZA3CF5zHTeSm68Xu9govmEzGsxd2
JNw/U1CYKXX2atuyf8umfSxZC7+O6tBueGFRdvy903q9ulpQg9hecyUSd8hKfplw5OSKnJwXri6+XTuOR/ZE7CHKStH6hmD7pA7Odjc3Xcfh2esPnvx13YXLuz9sO3TGp3tBuMfiVt24fcySxWZzf7+0y8uz2rzpR2d8ti8988nZC9shXaNV7twzUyiQTB6/c0Dsoqu3DisUechusNisghycI2U4a1erNHEdOMgOgAqv3jjUrvWgJg27u7sFgMVt3LD7b5d+tH7AyzO4aWQPDocLJjkstEVaeiIkJj69otbIe0fP8PUODfCr1S/mS9hFdoPrwFUUGBG+YKtdk9EsduHZSbsZmU9NZmPN6k2tKdWrRcryX+l0asuuj1eoNQsMrUWj2TkveDxHb88QS7rU2dNZYsdHZ3iOHDaHqs8iVwbY+rscLrsoT+9lNMMGqmwsGt20fUKpB9VLHmlSKGV8fknrkMfj2zzKgfeXfmnLh+2EyWDWa6m4DnBlgXNbzVHEMepN9tCuo6MI3gfELvbxql463dnZq5yjQLha7V/iVhqNAtkNg84oluJcvjj/N5GEa9SZ+MLK71Tz8Q7lcHhKZb5nnQ6WFKWqAFpH4KOUc5SnRxB4Glk5yRa3ITP7OdhpZDdAu56ednGZKALO2vUK5Ofm6kQulf/4mMBR3KJJ719//14kkkKrq6Aw69jPq8F/HTloVTlHhddsxXcQHj25olvniSaT4dSZjWKxHZ+1Naj0XhTuU3x3cNZujfrilwdlKMgZ2YEeUZMFjk5xp9fLFXlOYrdaYW26dnpDvFYskg4bsOzoqVXfbR3jIvXp1nHCxWv7LY6yPSjMVIfUodk8a28F5uMmNn6eVLNNoD1cXoqjyFMb5IqYib4IXzAv1NotnIuyMO/Wt4lKpq7XCvNHljGfS69ltOuWOS/KGVy59ccpKWn3bWaZTUY2x/b1gW6FOhGVNvfebxd/KN2vURpHvlirs33vjR++wc83zGaWRq4zaXU1GpQX9MAA/MdaXouTvUop9ghxsZkrl+cZTbYnRNIbdA4827M3iEWuDg6V1gSESJlGaztYZjDoeGX8BomTB5drO4SSGp/5QV/XgJqYP4fEiHHCe5enuoV42iNYRkHk2UqBg67TAPwHuzOiERM72T/pejpiAOAtyDOLmCBcxBDt8hzYn0z3T7ubibBGrzHkJeUNmhOImAFTgkdu3vzoEZ5PLryE3iaEIxAUS4vPHDArADEGZs3ppFGa9nyd6hokxWxaJ1lqIduk6z0B52juP2HiPJDn9uUmP1R5Vndx9hYjmpOXUpj1tKBlT/fID5g1jh8xdv5deb7hwmFZRrLayV0o9hCJXR3ZHNq4T0aDSZGrVuWpzUZjcISwbYw7YiTMnTsavXYhkh8on95RKYqMqgKDg4Aj8RBolRQdJ8PjcxT5Or3G6O4vcHLhhkWKQLj2G9REfRit3dLodWa13AhqNpsQNYE+PqGEK5JwOVycR0NUHKJdAl1h3NqABGwg2iXQFaJdAl0h2iXQFaJdAl0h2iXQlf8HAAD//+V60doAAAAGSURBVAMAq9lAdAlBAfUAAAAASUVORK5CYII=",
135
+ "text/plain": [
136
+ "<IPython.core.display.Image object>"
137
+ ]
138
+ },
139
+ "metadata": {},
140
+ "output_type": "display_data"
141
+ }
142
+ ],
143
+ "execution_count": 6
144
+ },
145
+ {
146
+ "metadata": {
147
+ "ExecuteTime": {
148
+ "end_time": "2025-04-26T19:17:49.705656Z",
149
+ "start_time": "2025-04-26T19:17:49.694242Z"
150
+ }
151
+ },
152
+ "cell_type": "code",
153
+ "source": "",
154
+ "id": "a027a884dcb0979b",
155
+ "outputs": [],
156
+ "execution_count": 7
157
+ },
158
+ {
159
+ "metadata": {
160
+ "ExecuteTime": {
161
+ "end_time": "2025-04-26T19:29:53.954345Z",
162
+ "start_time": "2025-04-26T19:29:53.915031Z"
163
+ }
164
+ },
165
+ "cell_type": "code",
166
+ "source": [
167
+ "import os\n",
168
+ "from dotenv import load_dotenv\n",
169
+ "from agent import *\n",
170
+ "\n",
171
+ "load_dotenv()\n",
172
+ "\n",
173
+ "llm = get_llm()"
174
+ ],
175
+ "id": "a27867479371d346",
176
+ "outputs": [
177
+ {
178
+ "ename": "NameError",
179
+ "evalue": "name 'os' is not defined",
180
  "output_type": "error",
181
  "traceback": [
182
  "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
183
+ "\u001B[31mNameError\u001B[39m Traceback (most recent call last)",
184
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[10]\u001B[39m\u001B[32m, line 7\u001B[39m\n\u001B[32m 3\u001B[39m \u001B[38;5;28;01mfrom\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34;01magent\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[38;5;28;01mimport\u001B[39;00m *\n\u001B[32m 5\u001B[39m load_dotenv()\n\u001B[32m----> \u001B[39m\u001B[32m7\u001B[39m llm = \u001B[43mget_llm\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\n",
185
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment/agent.py:13\u001B[39m, in \u001B[36mget_llm\u001B[39m\u001B[34m()\u001B[39m\n\u001B[32m 12\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34mget_llm\u001B[39m():\n\u001B[32m---> \u001B[39m\u001B[32m13\u001B[39m \u001B[43mos\u001B[49m.getenv(\u001B[33m\"\u001B[39m\u001B[33mGROQ_API_KEY\u001B[39m\u001B[33m\"\u001B[39m)\n\u001B[32m 14\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m init_chat_model(\u001B[33m\"\u001B[39m\u001B[33mllama-3.3-70b-versatile\u001B[39m\u001B[33m\"\u001B[39m, model_provider=\u001B[33m\"\u001B[39m\u001B[33mgroq\u001B[39m\u001B[33m\"\u001B[39m)\n",
186
+ "\u001B[31mNameError\u001B[39m: name 'os' is not defined"
187
  ]
188
  }
189
  ],
190
+ "execution_count": 10
191
  },
192
  {
193
  "metadata": {
194
  "ExecuteTime": {
195
+ "end_time": "2025-04-26T19:23:39.149221Z",
196
+ "start_time": "2025-04-26T19:23:38.656230Z"
197
  }
198
  },
199
  "cell_type": "code",
200
+ "source": [
201
+ "res = llm(\"Hello, how are you?\")\n",
202
+ "print(res.content)"
203
+ ],
204
+ "id": "e4ef9bf9d59e87b",
205
+ "outputs": [
206
+ {
207
+ "name": "stderr",
208
+ "output_type": "stream",
209
+ "text": [
210
+ "/var/folders/mf/9y254h_17lg1x86hptmcd4v80000gn/T/ipykernel_81627/3192806303.py:1: LangChainDeprecationWarning: The method `BaseChatModel.__call__` was deprecated in langchain-core 0.1.7 and will be removed in 1.0. Use :meth:`~invoke` instead.\n",
211
+ " res = llm(\"Hello, how are you?\")\n"
212
+ ]
213
+ },
214
+ {
215
+ "ename": "AttributeError",
216
+ "evalue": "'str' object has no attribute 'content'",
217
+ "output_type": "error",
218
+ "traceback": [
219
+ "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
220
+ "\u001B[31mAttributeError\u001B[39m Traceback (most recent call last)",
221
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[11]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m res = \u001B[43mllm\u001B[49m\u001B[43m(\u001B[49m\u001B[33;43m\"\u001B[39;49m\u001B[33;43mHello, how are you?\u001B[39;49m\u001B[33;43m\"\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[32m 2\u001B[39m \u001B[38;5;28mprint\u001B[39m(res.content)\n",
222
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/_api/deprecation.py:191\u001B[39m, in \u001B[36mdeprecated.<locals>.deprecate.<locals>.warning_emitting_wrapper\u001B[39m\u001B[34m(*args, **kwargs)\u001B[39m\n\u001B[32m 189\u001B[39m warned = \u001B[38;5;28;01mTrue\u001B[39;00m\n\u001B[32m 190\u001B[39m emit_warning()\n\u001B[32m--> \u001B[39m\u001B[32m191\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mwrapped\u001B[49m\u001B[43m(\u001B[49m\u001B[43m*\u001B[49m\u001B[43margs\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
223
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:1190\u001B[39m, in \u001B[36mBaseChatModel.__call__\u001B[39m\u001B[34m(self, messages, stop, callbacks, **kwargs)\u001B[39m\n\u001B[32m 1168\u001B[39m \u001B[38;5;129m@deprecated\u001B[39m(\u001B[33m\"\u001B[39m\u001B[33m0.1.7\u001B[39m\u001B[33m\"\u001B[39m, alternative=\u001B[33m\"\u001B[39m\u001B[33minvoke\u001B[39m\u001B[33m\"\u001B[39m, removal=\u001B[33m\"\u001B[39m\u001B[33m1.0\u001B[39m\u001B[33m\"\u001B[39m)\n\u001B[32m 1169\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m__call__\u001B[39m(\n\u001B[32m 1170\u001B[39m \u001B[38;5;28mself\u001B[39m,\n\u001B[32m (...)\u001B[39m\u001B[32m 1174\u001B[39m **kwargs: Any,\n\u001B[32m 1175\u001B[39m ) -> BaseMessage:\n\u001B[32m 1176\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"Call the model.\u001B[39;00m\n\u001B[32m 1177\u001B[39m \n\u001B[32m 1178\u001B[39m \u001B[33;03m Args:\u001B[39;00m\n\u001B[32m (...)\u001B[39m\u001B[32m 1188\u001B[39m \u001B[33;03m The model output message.\u001B[39;00m\n\u001B[32m 1189\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m-> \u001B[39m\u001B[32m1190\u001B[39m generation = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43mgenerate\u001B[49m\u001B[43m(\u001B[49m\n\u001B[32m 1191\u001B[39m \u001B[43m \u001B[49m\u001B[43m[\u001B[49m\u001B[43mmessages\u001B[49m\u001B[43m]\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mstop\u001B[49m\u001B[43m=\u001B[49m\u001B[43mstop\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mcallbacks\u001B[49m\u001B[43m=\u001B[49m\u001B[43mcallbacks\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43m*\u001B[49m\u001B[43m*\u001B[49m\u001B[43mkwargs\u001B[49m\n\u001B[32m 1192\u001B[39m \u001B[43m \u001B[49m\u001B[43m)\u001B[49m.generations[\u001B[32m0\u001B[39m][\u001B[32m0\u001B[39m]\n\u001B[32m 
1193\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(generation, ChatGeneration):\n\u001B[32m 1194\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m generation.message\n",
224
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:746\u001B[39m, in \u001B[36mBaseChatModel.generate\u001B[39m\u001B[34m(self, messages, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001B[39m\n\u001B[32m 732\u001B[39m inheritable_metadata = {\n\u001B[32m 733\u001B[39m **(metadata \u001B[38;5;129;01mor\u001B[39;00m {}),\n\u001B[32m 734\u001B[39m **\u001B[38;5;28mself\u001B[39m._get_ls_params(stop=stop, **kwargs),\n\u001B[32m 735\u001B[39m }\n\u001B[32m 737\u001B[39m callback_manager = CallbackManager.configure(\n\u001B[32m 738\u001B[39m callbacks,\n\u001B[32m 739\u001B[39m \u001B[38;5;28mself\u001B[39m.callbacks,\n\u001B[32m (...)\u001B[39m\u001B[32m 744\u001B[39m \u001B[38;5;28mself\u001B[39m.metadata,\n\u001B[32m 745\u001B[39m )\n\u001B[32m--> \u001B[39m\u001B[32m746\u001B[39m messages_to_trace = \u001B[43m[\u001B[49m\n\u001B[32m 747\u001B[39m \u001B[43m \u001B[49m\u001B[43m_format_for_tracing\u001B[49m\u001B[43m(\u001B[49m\u001B[43mmessage_list\u001B[49m\u001B[43m)\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43;01mfor\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mmessage_list\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;129;43;01min\u001B[39;49;00m\u001B[43m \u001B[49m\u001B[43mmessages\u001B[49m\n\u001B[32m 748\u001B[39m \u001B[43m\u001B[49m\u001B[43m]\u001B[49m\n\u001B[32m 749\u001B[39m run_managers = callback_manager.on_chat_model_start(\n\u001B[32m 750\u001B[39m \u001B[38;5;28mself\u001B[39m._serialized,\n\u001B[32m 751\u001B[39m messages_to_trace,\n\u001B[32m (...)\u001B[39m\u001B[32m 756\u001B[39m batch_size=\u001B[38;5;28mlen\u001B[39m(messages),\n\u001B[32m 757\u001B[39m )\n\u001B[32m 758\u001B[39m results = []\n",
225
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:747\u001B[39m, in \u001B[36m<listcomp>\u001B[39m\u001B[34m(.0)\u001B[39m\n\u001B[32m 732\u001B[39m inheritable_metadata = {\n\u001B[32m 733\u001B[39m **(metadata \u001B[38;5;129;01mor\u001B[39;00m {}),\n\u001B[32m 734\u001B[39m **\u001B[38;5;28mself\u001B[39m._get_ls_params(stop=stop, **kwargs),\n\u001B[32m 735\u001B[39m }\n\u001B[32m 737\u001B[39m callback_manager = CallbackManager.configure(\n\u001B[32m 738\u001B[39m callbacks,\n\u001B[32m 739\u001B[39m \u001B[38;5;28mself\u001B[39m.callbacks,\n\u001B[32m (...)\u001B[39m\u001B[32m 744\u001B[39m \u001B[38;5;28mself\u001B[39m.metadata,\n\u001B[32m 745\u001B[39m )\n\u001B[32m 746\u001B[39m messages_to_trace = [\n\u001B[32m--> \u001B[39m\u001B[32m747\u001B[39m \u001B[43m_format_for_tracing\u001B[49m\u001B[43m(\u001B[49m\u001B[43mmessage_list\u001B[49m\u001B[43m)\u001B[49m \u001B[38;5;28;01mfor\u001B[39;00m message_list \u001B[38;5;129;01min\u001B[39;00m messages\n\u001B[32m 748\u001B[39m ]\n\u001B[32m 749\u001B[39m run_managers = callback_manager.on_chat_model_start(\n\u001B[32m 750\u001B[39m \u001B[38;5;28mself\u001B[39m._serialized,\n\u001B[32m 751\u001B[39m messages_to_trace,\n\u001B[32m (...)\u001B[39m\u001B[32m 756\u001B[39m batch_size=\u001B[38;5;28mlen\u001B[39m(messages),\n\u001B[32m 757\u001B[39m )\n\u001B[32m 758\u001B[39m results = []\n",
226
+ "\u001B[36mFile \u001B[39m\u001B[32m~/PycharmProjects/Final_Assignment_Template/.venv/lib/python3.11/site-packages/langchain_core/language_models/chat_models.py:124\u001B[39m, in \u001B[36m_format_for_tracing\u001B[39m\u001B[34m(messages)\u001B[39m\n\u001B[32m 122\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m message \u001B[38;5;129;01min\u001B[39;00m messages:\n\u001B[32m 123\u001B[39m message_to_trace = message\n\u001B[32m--> \u001B[39m\u001B[32m124\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(\u001B[43mmessage\u001B[49m\u001B[43m.\u001B[49m\u001B[43mcontent\u001B[49m, \u001B[38;5;28mlist\u001B[39m):\n\u001B[32m 125\u001B[39m \u001B[38;5;28;01mfor\u001B[39;00m idx, block \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28menumerate\u001B[39m(message.content):\n\u001B[32m 126\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m (\n\u001B[32m 127\u001B[39m \u001B[38;5;28misinstance\u001B[39m(block, \u001B[38;5;28mdict\u001B[39m)\n\u001B[32m 128\u001B[39m \u001B[38;5;129;01mand\u001B[39;00m block.get(\u001B[33m\"\u001B[39m\u001B[33mtype\u001B[39m\u001B[33m\"\u001B[39m) == \u001B[33m\"\u001B[39m\u001B[33mimage\u001B[39m\u001B[33m\"\u001B[39m\n\u001B[32m 129\u001B[39m \u001B[38;5;129;01mand\u001B[39;00m is_data_content_block(block)\n\u001B[32m 130\u001B[39m ):\n",
227
+ "\u001B[31mAttributeError\u001B[39m: 'str' object has no attribute 'content'"
228
+ ]
229
+ }
230
+ ],
231
+ "execution_count": 11
232
+ },
233
+ {
234
+ "metadata": {
235
+ "ExecuteTime": {
236
+ "end_time": "2025-04-26T19:28:51.451989Z",
237
+ "start_time": "2025-04-26T19:28:51.368687Z"
238
+ }
239
+ },
240
+ "cell_type": "code",
241
+ "source": "res",
242
+ "id": "98d30f6eee7b2908",
243
  "outputs": [
244
  {
245
  "ename": "NameError",
246
+ "evalue": "name 'res' is not defined",
247
  "output_type": "error",
248
  "traceback": [
249
  "\u001B[31m---------------------------------------------------------------------------\u001B[39m",
250
  "\u001B[31mNameError\u001B[39m Traceback (most recent call last)",
251
+ "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[1]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m \u001B[43mres\u001B[49m\n",
252
+ "\u001B[31mNameError\u001B[39m: name 'res' is not defined"
253
  ]
254
  }
255
  ],
256
+ "execution_count": 1
257
  },
258
  {
259
  "metadata": {},
 
261
  "outputs": [],
262
  "execution_count": null,
263
  "source": "",
264
+ "id": "28dae3f2d9a4220d"
265
  }
266
  ],
267
  "metadata": {
prompts/system_prompt.md ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Task: Answering GAIA Benchmark Questions
2
+ You are tasked with answering questions from the GAIA benchmark for AI agents.
3
+
4
+ Provide ONLY the precise answer to the question. Do not include explanations, reasoning, or any additional text. Be direct, specific, and concise to meet the strict exact-matching requirements of the GAIA benchmark.
5
+
6
+ # Output Format
7
+ - **Single-word or short-phrase answers:** If the question necessitates a brief answer, provide just that word or phrase.
8
+ - **Numerical values:** Provide only the number when applicable, with no additional formatting or units unless specifically requested.
9
+ - **Full sentences:** If the question expects a sentence, provide the exact sentence required with no extra characters, punctuation, or formatting.
10
+
11
+ # Notes
12
+ - Be aware of strict exact-matching requirements; even minor deviations can result in an incorrect response.
13
+ - If any ambiguity exists in the phrasing of the input, respond with an answer that aligns with the GAIA benchmark's intended interpretation.
requirements.txt CHANGED
@@ -5,4 +5,5 @@ langchain~=0.3.24
5
  dotenv~=0.9.9
6
  python-dotenv~=1.1.0
7
  typing_extensions~=4.13.2
8
- langchain-groq~=0.3.2
 
 
5
  dotenv~=0.9.9
6
  python-dotenv~=1.1.0
7
  typing_extensions~=4.13.2
8
+ langchain-groq~=0.3.2
9
+ langgraph~=0.3.34
test.ipynb ADDED
@@ -0,0 +1,123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "metadata": {
5
+ "ExecuteTime": {
6
+ "end_time": "2025-04-26T16:18:43.794743Z",
7
+ "start_time": "2025-04-26T16:18:43.451406Z"
8
+ }
9
+ },
10
+ "cell_type": "code",
11
+ "source": [
12
+ "from dotenv import load_dotenv\n",
13
+ "\n",
14
+ "\n",
15
+ "load_dotenv()\n",
16
+ "\n",
17
+ "from langchain.chat_models import init_chat_model\n",
18
+ "\n",
19
+ "model = init_chat_model(\"llama-3.3-70b-versatile\", model_provider=\"groq\")\n",
20
+ "\n",
21
+ "\n"
22
+ ],
23
+ "id": "8650e18ecedee76b",
24
+ "outputs": [],
25
+ "execution_count": 1
26
+ },
27
+ {
28
+ "metadata": {
29
+ "ExecuteTime": {
30
+ "end_time": "2025-04-26T16:47:43.167391Z",
31
+ "start_time": "2025-04-26T16:47:42.533561Z"
32
+ }
33
+ },
34
+ "cell_type": "code",
35
+ "source": [
36
+ "\n",
37
+ "\n",
38
+ "\n",
39
+ "response = model.invoke([(\"system\", \"You are a general AI assistant. I will ask you a question. Report your thoughts, and finish your answer with the following template: FINAL ANSWER: [YOUR FINAL ANSWER]. YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings. If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise. If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise. If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.\"), (\"user\", \"How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)? You can use the latest 2022 version of english wikipedia.\")])"
40
+ ],
41
+ "id": "2cf579b261985300",
42
+ "outputs": [],
43
+ "execution_count": 7
44
+ },
45
+ {
46
+ "metadata": {
47
+ "ExecuteTime": {
48
+ "end_time": "2025-04-26T16:44:53.835775Z",
49
+ "start_time": "2025-04-26T16:44:53.788795Z"
50
+ }
51
+ },
52
+ "cell_type": "code",
53
+ "source": "response",
54
+ "id": "6f8ec7273ff2b9c9",
55
+ "outputs": [
56
+ {
57
+ "data": {
58
+ "text/plain": [
59
+ "AIMessage(content=\"According to the 2022 English Wikipedia, Mercedes Sosa published the following studio albums between 2000 and 2009:\\n\\n1. Acústico (2002) - Although described as 'acústico', some sources consider this as a live album.\\n2. Argentina Que Tal (2001) - This is considered a 'live with guests' album but some sources consider this as a studio album.\\n3. Corazón Libre (2005) - This is considered a studio album.\\n4. Cantora (2009) - This is considered a studio album.\\n\\nTherefore, the number of studio albums published by Mercedes Sosa between 2000 and 2009 (included) can be considered as 3 or 4, depending on how 'Acústico' and 'Argentina Que Tal' are classified.\", additional_kwargs={}, response_metadata={'token_usage': {'completion_tokens': 171, 'prompt_tokens': 69, 'total_tokens': 240, 'completion_time': 0.621818182, 'prompt_time': 0.004940866, 'queue_time': 0.093644521, 'total_time': 0.626759048}, 'model_name': 'llama-3.3-70b-versatile', 'system_fingerprint': 'fp_9a8b91ba77', 'finish_reason': 'stop', 'logprobs': None}, id='run-151ecdee-f777-4723-98bc-0b5e0243300a-0', usage_metadata={'input_tokens': 69, 'output_tokens': 171, 'total_tokens': 240})"
60
+ ]
61
+ },
62
+ "execution_count": 5,
63
+ "metadata": {},
64
+ "output_type": "execute_result"
65
+ }
66
+ ],
67
+ "execution_count": 5
68
+ },
69
+ {
70
+ "metadata": {
71
+ "ExecuteTime": {
72
+ "end_time": "2025-04-26T16:47:46.695284Z",
73
+ "start_time": "2025-04-26T16:47:46.684725Z"
74
+ }
75
+ },
76
+ "cell_type": "code",
77
+ "source": "response.content",
78
+ "id": "ee0f4c8b47699033",
79
+ "outputs": [
80
+ {
81
+ "data": {
82
+ "text/plain": [
83
+ "'To find out how many studio albums were published by Mercedes Sosa between 2000 and 2009, I need to look at her discography. According to the Wikipedia page on Mercedes Sosa, her studio albums between 2000 and 2009 are: \\n- Acústico (2002) \\n- Corazón libre (2005) \\n- Cantora (2009).\\n\\nFINAL ANSWER: 3'"
84
+ ]
85
+ },
86
+ "execution_count": 8,
87
+ "metadata": {},
88
+ "output_type": "execute_result"
89
+ }
90
+ ],
91
+ "execution_count": 8
92
+ },
93
+ {
94
+ "metadata": {},
95
+ "cell_type": "code",
96
+ "outputs": [],
97
+ "execution_count": null,
98
+ "source": "",
99
+ "id": "1fa2a5fe69809336"
100
+ }
101
+ ],
102
+ "metadata": {
103
+ "kernelspec": {
104
+ "display_name": "Python 3",
105
+ "language": "python",
106
+ "name": "python3"
107
+ },
108
+ "language_info": {
109
+ "codemirror_mode": {
110
+ "name": "ipython",
111
+ "version": 2
112
+ },
113
+ "file_extension": ".py",
114
+ "mimetype": "text/x-python",
115
+ "name": "python",
116
+ "nbconvert_exporter": "python",
117
+ "pygments_lexer": "ipython2",
118
+ "version": "2.7.6"
119
+ }
120
+ },
121
+ "nbformat": 4,
122
+ "nbformat_minor": 5
123
+ }