ktluege commited on
Commit
4d9e0a4
·
verified ·
1 Parent(s): 885ae0e

Update agent.py

Browse files
Files changed (1) hide show
  1. agent.py +184 -32
agent.py CHANGED
@@ -1,74 +1,226 @@
 
1
  import os
2
  from dotenv import load_dotenv
3
- from langchain.graph import START, StateGraph, MessagesState
 
 
4
  from langchain_openai import ChatOpenAI
5
- from langchain_huggingface import HuggingFaceEmbeddings
 
 
 
 
6
  from langchain_community.vectorstores import SupabaseVectorStore
7
- from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
8
  from langchain_core.tools import tool
 
 
9
 
10
  load_dotenv()
11
 
12
- # ... [Your tool definitions here] ...
 
 
 
 
 
 
 
13
 
14
- tools = [multiply, add, subtract, divide, modulus, wiki_search, web_search, arvix_search]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
 
 
 
 
16
  with open("system_prompt.txt", "r", encoding="utf-8") as f:
17
  system_prompt = f.read()
18
 
 
19
  sys_msg = SystemMessage(content=system_prompt)
20
 
21
- embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")
22
- from supabase.client import create_client
23
- supabase = create_client(
24
- os.environ.get("SUPABASE_URL"),
25
  os.environ.get("SUPABASE_SERVICE_KEY"))
26
  vector_store = SupabaseVectorStore(
27
  client=supabase,
28
- embedding=embeddings,
29
  table_name="documents",
30
- query_name="match_documents_langchain"
31
  )
 
 
 
 
 
 
32
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  def build_graph(provider: str = "openai"):
34
- if provider == "openai":
 
 
 
 
 
 
 
 
 
 
 
 
35
  llm = ChatOpenAI(
36
  model="gpt-3.5-turbo", # or "gpt-4o"
37
  temperature=0,
38
  openai_api_key=os.environ.get("OPENAI_API_KEY"),
39
  )
40
  else:
41
- raise ValueError("Invalid provider.")
42
 
 
43
  llm_with_tools = llm.bind_tools(tools)
44
- def assistant(state: MessagesState):
45
- user_message = state["messages"][-1]
46
- # You must have llm_with_tools defined earlier in build_graph
47
- result = llm_with_tools.invoke([sys_msg, user_message])
48
- return {"messages": [result]}
49
 
 
 
 
 
50
 
 
 
 
 
 
 
 
 
 
51
 
52
  def retriever(state: MessagesState):
53
  query = state["messages"][-1].content
54
- results = vector_store.similarity_search(query, k=1)
55
- if not results:
56
- return {"messages": [AIMessage(content="FINAL ANSWER: No relevant answer found.")]}
57
- similar_doc = results[0]
58
  content = similar_doc.page_content
59
- if "FINAL ANSWER:" in content:
60
- answer = content.split("FINAL ANSWER:")[-1].strip()
61
- return {"messages": [AIMessage(content=f"FINAL ANSWER: {answer}")]}
62
  else:
63
- return {"messages": [AIMessage(content=content.strip())]}
 
 
64
 
 
 
 
 
 
 
 
 
 
 
 
65
 
66
  builder = StateGraph(MessagesState)
67
  builder.add_node("retriever", retriever)
68
- builder.add_node("assistant", assistant)
69
- builder.add_edge(START, "retriever")
70
- builder.add_edge("retriever", "assistant")
71
- builder.set_finish_point("assistant")
72
- return builder.compile()
73
 
74
- return builder.compile()
 
 
 
 
 
 
1
+ """LangGraph Agent"""
2
  import os
3
  from dotenv import load_dotenv
4
+ from langgraph.graph import START, StateGraph, MessagesState
5
+ from langgraph.prebuilt import tools_condition
6
+ from langgraph.prebuilt import ToolNode
7
  from langchain_openai import ChatOpenAI
8
+ from langchain_groq import ChatGroq
9
+ from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint, HuggingFaceEmbeddings
10
+ from langchain_community.tools.tavily_search import TavilySearchResults
11
+ from langchain_community.document_loaders import WikipediaLoader
12
+ from langchain_community.document_loaders import ArxivLoader
13
  from langchain_community.vectorstores import SupabaseVectorStore
14
+ from langchain_core.messages import SystemMessage, HumanMessage
15
  from langchain_core.tools import tool
16
+ from langchain.tools.retriever import create_retriever_tool
17
+ from supabase.client import Client, create_client
18
 
19
  load_dotenv()
20
 
21
@tool
def multiply(a: int, b: int) -> int:
    """Return the product of two integers.

    Args:
        a: first int
        b: second int
    """
    product = a * b
    return product
29
 
30
@tool
def add(a: int, b: int) -> int:
    """Return the sum of two integers.

    Args:
        a: first int
        b: second int
    """
    total = a + b
    return total
39
+
40
@tool
def subtract(a: int, b: int) -> int:
    """Return the difference of two integers (a minus b).

    Args:
        a: first int
        b: second int
    """
    difference = a - b
    return difference
49
+
50
@tool
def divide(a: int, b: int) -> float:
    """Divide two numbers.

    Args:
        a: first int
        b: second int

    Returns:
        The quotient a / b as a float.

    Raises:
        ValueError: if b is zero.
    """
    # Guard clause: Python raises ZeroDivisionError otherwise; a ValueError
    # with a clear message is friendlier for the tool-calling LLM.
    if b == 0:
        raise ValueError("Cannot divide by zero.")
    # True division always yields a float, so the return annotation is
    # float (the original declared -> int, which mismatched the value).
    return a / b
61
+
62
@tool
def modulus(a: int, b: int) -> int:
    """Get the modulus of two numbers.

    Args:
        a: first int
        b: second int
    """
    remainder = a % b
    return remainder
71
+
72
@tool
def wiki_search(query: str) -> str:
    """Search Wikipedia for a query and return maximum 2 results.

    Args:
        query: The search query.

    Returns:
        The matching documents formatted as <Document .../> blocks,
        joined by a separator.
    """
    search_docs = WikipediaLoader(query=query, load_max_docs=2).load()
    formatted_search_docs = "\n\n---\n\n".join(
        f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content}\n</Document>'
        for doc in search_docs
    )
    # Return the formatted string itself so the output matches the declared
    # -> str annotation (the original wrapped it in a {"wiki_results": ...}
    # dict, which contradicted the signature).
    return formatted_search_docs
85
+
86
@tool
def web_search(query: str) -> str:
    """Search Tavily for a query and return maximum 3 results.

    Args:
        query: The search query.

    Returns:
        The search hits formatted as <Document .../> blocks, joined by a
        separator.
    """
    # BaseTool.invoke takes the tool input as its first positional argument;
    # the original `.invoke(query=query)` supplied no input and raised
    # TypeError.
    search_results = TavilySearchResults(max_results=3).invoke(query)
    # NOTE(review): TavilySearchResults returns a list of dicts with "url"
    # and "content" keys, not Document objects, so format via dict access
    # (the original read `doc.metadata` / `doc.page_content` and would raise
    # AttributeError) — confirm against the installed langchain-community
    # version.
    formatted_search_docs = "\n\n---\n\n".join(
        f'<Document source="{res.get("url", "")}"/>\n{res.get("content", "")}\n</Document>'
        for res in search_results
    )
    # Return a string to match the declared -> str annotation (the original
    # returned a {"web_results": ...} dict).
    return formatted_search_docs
99
+
100
@tool
def arvix_search(query: str) -> str:
    """Search Arxiv for a query and return maximum 3 results.

    Args:
        query: The search query.

    Returns:
        The matching papers (first 1000 chars of each) formatted as
        <Document .../> blocks, joined by a separator.
    """
    search_docs = ArxivLoader(query=query, load_max_docs=3).load()
    formatted_search_docs = "\n\n---\n\n".join(
        # Truncate each paper to 1000 chars to keep the tool output small.
        f'<Document source="{doc.metadata["source"]}" page="{doc.metadata.get("page", "")}"/>\n{doc.page_content[:1000]}\n</Document>'
        for doc in search_docs
    )
    # Return the formatted string itself so the output matches the declared
    # -> str annotation (the original wrapped it in an {"arvix_results": ...}
    # dict, which contradicted the signature).
    return formatted_search_docs
113
 
114
+
115
+
116
# Load the system prompt from a file kept next to this module.
with open("system_prompt.txt", "r", encoding="utf-8") as f:
    system_prompt = f.read()

# System message prepended to every conversation.
sys_msg = SystemMessage(content=system_prompt)

# Build a retriever over the Supabase-backed vector store.
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")  # dim=768
supabase: Client = create_client(
    os.environ.get("SUPABASE_URL"),
    os.environ.get("SUPABASE_SERVICE_KEY"))
vector_store = SupabaseVectorStore(
    client=supabase,
    embedding=embeddings,
    table_name="documents",
    query_name="match_documents_langchain",
)
# Wrap the vector store as a retriever tool. The original assigned the result
# to the name `create_retriever_tool`, shadowing the imported factory function
# and making it uncallable afterwards; use a distinct name instead.
retriever_tool = create_retriever_tool(
    retriever=vector_store.as_retriever(),
    name="Question Search",
    description="A tool to retrieve similar questions from a vector store.",
)
139
+
140
 
141
+
142
# Tools exposed to the LLM via bind_tools in build_graph.
tools = [multiply, add, subtract, divide, modulus, wiki_search, web_search, arvix_search]
152
+
153
# Build graph function
def build_graph(provider: str = "openai"):
    """Build and compile the agent graph.

    Args:
        provider: LLM backend — one of 'google', 'groq', 'huggingface',
            or 'openai' (default).

    Returns:
        A compiled LangGraph graph whose single node answers directly from
        the most similar document in the vector store.

    Raises:
        ValueError: if ``provider`` is not one of the supported backends.
    """
    if provider == "google":
        # Imported lazily: the original referenced ChatGoogleGenerativeAI
        # without any import, so this branch raised NameError.
        from langchain_google_genai import ChatGoogleGenerativeAI
        llm = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0)
    elif provider == "groq":
        llm = ChatGroq(model="qwen-qwq-32b", temperature=0)
    elif provider == "huggingface":
        llm = ChatHuggingFace(
            llm=HuggingFaceEndpoint(
                url="https://api-inference.huggingface.co/models/Meta-DeepLearning/llama-2-7b-chat-hf",
                temperature=0,
            ),
        )
    elif provider == "openai":
        llm = ChatOpenAI(
            model="gpt-3.5-turbo",  # or "gpt-4o"
            temperature=0,
            openai_api_key=os.environ.get("OPENAI_API_KEY"),
        )
    else:
        raise ValueError("Invalid provider. Choose 'google', 'groq', 'huggingface', or 'openai'.")

    # Bind tools to LLM
    llm_with_tools = llm.bind_tools(tools)

    # Node. NOTE(review): assistant is defined but not wired into the graph
    # below — presumably kept for a future tool-calling loop; confirm intent.
    def assistant(state: MessagesState):
        """Assistant node: run the tool-bound LLM over the conversation."""
        return {"messages": [llm_with_tools.invoke(state["messages"])]}

    from langchain_core.messages import AIMessage

    def retriever(state: MessagesState):
        """Retriever node: answer from the closest stored document."""
        query = state["messages"][-1].content
        results = vector_store.similarity_search(query, k=1)
        # Guard against an empty result set: the original indexed [0]
        # unconditionally and raised IndexError when nothing matched.
        if not results:
            return {"messages": [AIMessage(content="No relevant answer found.")]}
        content = results[0].page_content
        # NOTE(review): the marker's exact casing/spacing must match what is
        # stored in Supabase — confirm "Final answer :" is the stored format.
        if "Final answer :" in content:
            answer = content.split("Final answer :")[-1].strip()
        else:
            answer = content.strip()
        return {"messages": [AIMessage(content=answer)]}

    builder = StateGraph(MessagesState)
    builder.add_node("retriever", retriever)

    # The retriever is both entry and finish point: the graph currently
    # answers purely from the vector store, without the assistant/tool loop.
    builder.set_entry_point("retriever")
    builder.set_finish_point("retriever")

    # Compile graph
    return builder.compile()