Aya1610 committed on
Commit
3f0d696
·
verified ·
1 Parent(s): d13f2e3

Delete agent.py

Browse files
Files changed (1) hide show
  1. agent.py +0 -319
agent.py DELETED
@@ -1,319 +0,0 @@
1
import os
from dotenv import load_dotenv
# Fix: the package exposes the DDGS class (used below in web_search);
# the old `ddg` helper function was removed from duckduckgo_search.
from duckduckgo_search import DDGS

load_dotenv()

# --- Supabase Setup (only if credentials are provided) ---
# SUPABASE_SERVICE_KEY takes precedence over SUPABASE_KEY when both are set.
supabase_url = os.getenv("SUPABASE_URL")
supabase_key = os.getenv("SUPABASE_SERVICE_KEY") or os.getenv("SUPABASE_KEY")

if supabase_url and supabase_key:
    # Import the Supabase/LangChain integrations lazily so the module
    # still loads when the optional credentials are absent.
    from supabase.client import Client, create_client
    from langchain_community.vectorstores import SupabaseVectorStore
    from langchain.tools.retriever import create_retriever_tool
    from langchain_openai import OpenAIEmbeddings
    supabase: Client = create_client(supabase_url, supabase_key)
else:
    # Sentinel checked later when assembling the tools list and in build_graph.
    supabase = None
18
-
19
- # --- Standard Imports ---
20
- from langgraph.graph import START, StateGraph, MessagesState
21
- from langgraph.prebuilt import tools_condition, ToolNode
22
- from langchain_core.messages import SystemMessage, HumanMessage, AIMessage
23
- from langchain_core.tools import tool
24
-
25
- # OpenAI LLM
26
- from langchain_openai import ChatOpenAI
27
-
28
- # Optional document loaders
29
- from langchain_community.tools.tavily_search import TavilySearchResults
30
- from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
31
-
32
- # --- Simple Math Tools ---
33
@tool
def multiply(a: int, b: int) -> int:
    """Multiply two integers and return the result"""
    # The docstring doubles as the tool description shown to the LLM.
    product = a * b
    return product
37
-
38
@tool
def add(a: int, b: int) -> int:
    """Add two integers and return the sum"""
    # The docstring doubles as the tool description shown to the LLM.
    total = a + b
    return total
42
-
43
@tool
def subtract(a: int, b: int) -> int:
    """Subtract the second integer from the first and return the difference"""
    # The docstring doubles as the tool description shown to the LLM.
    difference = a - b
    return difference
47
-
48
@tool
def divide(a: int, b: int) -> float:
    """Divide the first integer by the second and return the quotient"""
    # Reject a zero divisor up front; otherwise return true division.
    if b != 0:
        return a / b
    raise ValueError("Cannot divide by zero.")
54
-
55
@tool
def modulus(a: int, b: int) -> int:
    """Return the modulus of dividing the first integer by the second"""
    # The docstring doubles as the tool description shown to the LLM.
    remainder = a % b
    return remainder
59
-
60
- # --- Search Tools ---
61
@tool
def wiki_search(query: str) -> str:
    """Search Wikipedia for the query and return up to 2 documents"""
    # Any loader/network failure is reported as text so the agent can recover.
    try:
        loader = WikipediaLoader(query=query, load_max_docs=2)
        sections = []
        for doc in loader.load():
            sections.append(
                f'<Document source="{doc.metadata["source"]}"/>\n{doc.page_content}'
            )
        return "\n\n---\n\n".join(sections)
    except Exception as e:
        return f"Wikipedia search failed: {str(e)}"
71
-
72
@tool
def web_search(query: str) -> str:
    """
    Search DuckDuckGo via the DDGS class and return the top 3 results
    (title, URL, and snippet).
    """
    # Network/client failures are surfaced as plain text, not exceptions.
    try:
        with DDGS() as client:
            hits = client.text(query, max_results=3)
    except Exception as e:
        return f"DuckDuckGo search error: {e}"

    if not hits:
        return "No results from DuckDuckGo."

    # Each hit is a dict; fall back to placeholders for missing fields.
    entries = [
        "• **{}**\n {}\n _{}_".format(
            hit.get("title", "<no title>").strip(),
            hit.get("href", "<no URL>").strip(),
            hit.get("body", "").strip(),
        )
        for hit in hits
    ]
    return "\n\n".join(entries)
94
- # def web_search(query: str) -> str:
95
- # """Search the web using Tavily and return up to 3 results"""
96
- # try:
97
- # tavily_api_key = os.getenv("TAVILY_API_KEY")
98
- # if not tavily_api_key:
99
- # return "Web search unavailable: TAVILY_API_KEY not configured"
100
-
101
- # search_tool = TavilySearchResults(max_results=3, api_key=tavily_api_key)
102
- # docs = search_tool.invoke({"query": query})
103
- # return "\n\n---\n\n".join(
104
- # f'<Document source="{doc.get("url", "Unknown")}"/>\n{doc.get("content", "")}' for doc in docs
105
- # )
106
- # except Exception as e:
107
- # return f"Web search failed: {str(e)}"
108
-
109
@tool
def arxiv_search(query: str) -> str:
    """Search Arxiv for the query and return up to 3 documents"""
    try:
        documents = ArxivLoader(query=query, load_max_docs=3).load()
        # Truncate each paper to its first 1000 characters to keep output small.
        chunks = [
            f'<Document source="{doc.metadata["source"]}"/>\n{doc.page_content[:1000]}'
            for doc in documents
        ]
        return "\n\n---\n\n".join(chunks)
    except Exception as e:
        return f"Arxiv search failed: {str(e)}"
119
-
120
# --- Assemble Tools List ---
# Base tool set: five math tools plus three search tools (8 entries).
tools = [
    multiply,
    add,
    subtract,
    divide,
    modulus,
    wiki_search,
    web_search,
    arxiv_search,
]

# If supabase is configured, add retriever tool
if supabase:
    try:
        embeddings = OpenAIEmbeddings()
        # vector_store is also consulted directly inside build_graph's
        # retriever node, so it must stay a module-level name.
        vector_store = SupabaseVectorStore(
            client=supabase,
            embedding=embeddings,
            table_name="documents",
            query_name="match_documents_langchain",
        )
        retriever_tool = create_retriever_tool(
            retriever=vector_store.as_retriever(),
            name="Question Search",
            description="Retrieve similar questions from the vector store",
        )
        tools.append(retriever_tool)
    except Exception as e:
        # Best-effort: fall back to the base tool set if the store fails.
        print(f"Could not initialize Supabase retriever: {e}")
141
-
142
# --- Load System Prompt ---
def load_system_prompt():
    """Load system prompt with fallback"""
    # Prefer the on-disk prompt; use the built-in default when missing.
    try:
        with open("system_prompt.txt", "r", encoding="utf-8") as f:
            prompt_text = f.read()
    except FileNotFoundError:
        # Fallback system prompt
        prompt_text = """You are a helpful AI assistant with access to various tools including:
- Math operations (add, subtract, multiply, divide, modulus)
- Search capabilities (Wikipedia, Arxiv, web search via Tavily)
- Information retrieval

Use these tools when appropriate to answer questions accurately and helpfully. When performing calculations, always use the provided math tools. When users ask for information that might require current data or research, use the appropriate search tools.

Be concise but thorough in your responses. If you use a tool, explain what you found or calculated."""
    return SystemMessage(content=prompt_text)

sys_msg = load_system_prompt()
161
-
162
- # --- Graph Builder (OpenAI) ---
163
def build_graph():
    """
    Build and return a StateGraph using OpenAI ChatGPT with tools.

    If the OpenAI client cannot be initialized (missing/invalid key or a
    failed test call), a rule-based mock LLM is substituted so the graph
    still runs: retriever -> assistant -> (tools -> assistant)* loop.
    """
    print("=== BUILDING OPENAI GRAPH ===")

    # Check for OpenAI API key
    openai_api_key = os.getenv("OPENAI_API_KEY")
    print(f"OpenAI API Key: {'Found' if openai_api_key else 'Not found'}")

    if openai_api_key:
        print(f"API Key starts with: {openai_api_key[:10]}...")

    try:
        if openai_api_key and len(openai_api_key.strip()) > 0:
            print("Attempting to initialize OpenAI ChatGPT...")

            # Initialize OpenAI LLM
            llm = ChatOpenAI(
                model="gpt-4",  # adjust if your account lacks gpt-4 access
                temperature=0.1,
                api_key=openai_api_key.strip(),
                max_tokens=512
            )

            # Test the connection with a trivial round-trip; a failure here
            # drops us into the mock-LLM fallback below.
            test_response = llm.invoke([HumanMessage(content="Hello")])
            print("✓ Successfully connected to OpenAI")
            print(f"Test response: {test_response.content[:50]}...")

        else:
            raise Exception("No valid OPENAI_API_KEY found")

    except Exception as e:
        print(f"Error initializing OpenAI LLM: {e}")
        print("Creating functional mock LLM...")

        class FunctionalMockLLM:
            # Minimal stand-in that mimics the two ChatOpenAI entry points
            # used below: bind_tools() and invoke().
            def bind_tools(self, tools):
                self.tools = tools
                return self

            def invoke(self, messages):
                # Rule-based dispatcher: pattern-match the last message and
                # emit tool_calls compatible with ToolNode.
                from langchain_core.messages import AIMessage
                import json
                import re

                last_msg = messages[-1] if messages else None
                if not last_msg:
                    return AIMessage(content="Please ask me a question!")

                content = getattr(last_msg, 'content', str(last_msg))
                content_lower = content.lower()

                # Handle math operations with tool calls: first arithmetic
                # expression found wins, mapped to the matching math tool.
                math_patterns = [
                    (r'(\d+)\s*\+\s*(\d+)', 'add'),
                    (r'(\d+)\s*-\s*(\d+)', 'subtract'),
                    (r'(\d+)\s*\*\s*(\d+)', 'multiply'),
                    (r'(\d+)\s*/\s*(\d+)', 'divide'),
                    (r'(\d+)\s*%\s*(\d+)', 'modulus'),
                ]

                for pattern, operation in math_patterns:
                    match = re.search(pattern, content)
                    if match:
                        a, b = int(match.group(1)), int(match.group(2))

                        tool_call = {
                            "name": operation,
                            "args": {"a": a, "b": b},
                            "id": f"call_{operation}_{a}_{b}"
                        }

                        return AIMessage(
                            content=f"I'll {operation} {a} and {b} for you.",
                            tool_calls=[tool_call]
                        )

                # Handle search requests
                if any(word in content_lower for word in ['search', 'find', 'look up', 'what is', 'who is', 'tell me about']):
                    # Extract search query by stripping the trigger phrases.
                    # NOTE(review): this lowercases the whole query and caps
                    # it at 100 chars — confirm that is acceptable.
                    search_query = content
                    for phrase in ['search for', 'find', 'look up', 'what is', 'who is', 'tell me about']:
                        search_query = search_query.lower().replace(phrase, '').strip()

                    if len(search_query) > 100:
                        search_query = search_query[:100]

                    # Route to the most specific search tool mentioned.
                    if 'wikipedia' in content_lower:
                        tool_name = "wiki_search"
                    elif 'arxiv' in content_lower or 'research' in content_lower or 'paper' in content_lower:
                        tool_name = "arxiv_search"
                    else:
                        tool_name = "web_search"

                    tool_call = {
                        "name": tool_name,
                        "args": {"query": search_query},
                        # NOTE(review): hash() is salted per process, so this
                        # id is not stable across runs — confirm that's fine.
                        "id": f"call_{tool_name}_{hash(search_query) % 1000}"
                    }

                    return AIMessage(
                        content=f"I'll search for information about: {search_query}",
                        tool_calls=[tool_call]
                    )

                # Default response for other questions
                return AIMessage(content=f"I understand you're asking: {content[:200]}... I can help with math calculations and information searches. Please configure OPENAI_API_KEY for full functionality, or try asking me to calculate something or search for information.")

        llm = FunctionalMockLLM()
        print("✓ Using functional mock LLM")

    # Bind tools to LLM (same interface for real and mock LLM)
    llm_with_tools = llm.bind_tools(tools)

    def retriever(state: MessagesState):
        """Add system message and handle retrieval if Supabase is available"""
        messages = [sys_msg] + state["messages"]

        # NOTE(review): relies on the base tool list having exactly 8
        # entries; the 9th is the Supabase retriever tool added at import.
        if supabase and len(tools) > 8:  # Check if retriever tool was added
            try:
                query = state["messages"][-1].content
                # vector_store is the module-level SupabaseVectorStore.
                docs = vector_store.similarity_search(query, k=1)
                if docs:
                    doc = docs[0]
                    content = doc.page_content
                    # Strip everything before the stored "Final answer :" marker.
                    answer = content.split("Final answer :")[-1].strip() if "Final answer :" in content else content.strip()
                    return {"messages": messages + [AIMessage(content=f"Retrieved context: {answer}")]}
            except Exception as e:
                # Best-effort retrieval: fall through to plain messages.
                print(f"Retrieval error: {e}")

        return {"messages": messages}

    def assistant(state: MessagesState):
        """Main assistant function"""
        try:
            response = llm_with_tools.invoke(state["messages"])
            return {"messages": [response]}
        except Exception as e:
            # Surface the failure as a message instead of crashing the graph.
            print(f"Assistant error: {e}")
            return {"messages": [AIMessage(content=f"I encountered an error: {str(e)}. Please make sure your OPENAI_API_KEY is configured correctly.")]}

    # Build the graph
    g = StateGraph(MessagesState)
    g.add_node("retriever", retriever)
    g.add_node("assistant", assistant)
    g.add_node("tools", ToolNode(tools))

    # Define edges: START -> retriever -> assistant, with tools_condition
    # looping assistant <-> tools until no tool call is emitted.
    g.add_edge(START, "retriever")
    g.add_edge("retriever", "assistant")
    g.add_conditional_edges("assistant", tools_condition)
    g.add_edge("tools", "assistant")

    print("✓ Graph compiled successfully")
    return g.compile()