Kackle committed on
Commit
e8c0262
·
verified ·
1 Parent(s): 66e5d2c

more tools now that we shouldn't hit rate limits

Browse files
Files changed (1) hide show
  1. gemini_agent.py +26 -7
gemini_agent.py CHANGED
@@ -5,6 +5,9 @@ from excel_parser import ExcelParser
5
  import re
6
  import time
7
  import asyncio
 
 
 
8
 
9
  load_dotenv()
10
 
@@ -22,6 +25,9 @@ class GeminiAgent:
22
 
23
  # Initialize parsers
24
  self.excel_parser = ExcelParser()
 
 
 
25
 
26
  async def __call__(self, question: str) -> str:
27
  print(f"GeminiAgent received question (first 50 chars): {question}...")
@@ -157,6 +163,20 @@ Provide only the direct answer. If it's a quote, give just the quoted text. If i
157
  async def _handle_text_question(self, question: str) -> str:
158
  """Handle regular text-based questions"""
159
  prompt = ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
160
  # Handle attached file questions with enhanced prompts
161
  if 'attached' in question.lower():
162
  if 'python code' in question.lower():
@@ -194,6 +214,12 @@ Provide only the direct answer. If it's a quote, give just the quoted text. If i
194
  else:
195
  prompt = f"""Answer this question with only the essential information requested:\n\n{question}\n\nAnswer:"""
196
 
 
 
 
 
 
 
197
  # Use the constructed prompt for all cases
198
  await self._rate_limit()
199
  response = self.model.generate_content(
@@ -242,13 +268,6 @@ Provide only the direct answer. If it's a quote, give just the quoted text. If i
242
  if list_match:
243
  answer = ', '.join([item.strip() for item in list_match if item.strip()])
244
 
245
- # Wikipedia tool integration (simple version)
246
- if 'wikipedia' in question.lower() or 'according to wikipedia' in question.lower():
247
- # Add a Wikipedia search instruction to the prompt if not already present
248
- if 'wikipedia' not in prompt.lower():
249
- prompt += "\nIf you do not know the answer, search the latest English Wikipedia and use only information from there."
250
- # Optionally, you could call a real Wikipedia API here for retrieval-augmented generation
251
-
252
  return answer
253
 
254
  async def _generate_video_answer_from_question(self, question: str, video_id: str) -> str:
 
5
  import re
6
  import time
7
  import asyncio
8
+ # Add LangChain tools for Wikipedia and DuckDuckGo
9
+ from langchain.tools import DuckDuckGoSearchRun, WikipediaQueryRun
10
+ from langchain.utilities import WikipediaAPIWrapper
11
 
12
  load_dotenv()
13
 
 
25
 
26
  # Initialize parsers
27
  self.excel_parser = ExcelParser()
28
+ # Initialize Wikipedia and DuckDuckGo tools
29
+ self.wiki_tool = WikipediaQueryRun(api_wrapper=WikipediaAPIWrapper())
30
+ self.ddg_tool = DuckDuckGoSearchRun()
31
 
32
  async def __call__(self, question: str) -> str:
33
  print(f"GeminiAgent received question (first 50 chars): {question}...")
 
163
  async def _handle_text_question(self, question: str) -> str:
164
  """Handle regular text-based questions"""
165
  prompt = ""
166
+ wiki_context = ""
167
+ ddg_context = ""
168
+ # Wikipedia retrieval
169
+ if 'wikipedia' in question.lower():
170
+ try:
171
+ wiki_context = self.wiki_tool.run(question)
172
+ except Exception as e:
173
+ print(f"Wikipedia tool failed: {e}")
174
+ # DuckDuckGo retrieval
175
+ if 'duckduckgo' in question.lower() or 'web search' in question.lower():
176
+ try:
177
+ ddg_context = self.ddg_tool.run(question)
178
+ except Exception as e:
179
+ print(f"DuckDuckGo tool failed: {e}")
180
  # Handle attached file questions with enhanced prompts
181
  if 'attached' in question.lower():
182
  if 'python code' in question.lower():
 
214
  else:
215
  prompt = f"""Answer this question with only the essential information requested:\n\n{question}\n\nAnswer:"""
216
 
217
+ # Prepend context to the prompt if available
218
+ if wiki_context:
219
+ prompt = f"Use the following Wikipedia context to answer the question:\n{wiki_context}\n\n{prompt}"
220
+ elif ddg_context:
221
+ prompt = f"Use the following web search context to answer the question:\n{ddg_context}\n\n{prompt}"
222
+
223
  # Use the constructed prompt for all cases
224
  await self._rate_limit()
225
  response = self.model.generate_content(
 
268
  if list_match:
269
  answer = ', '.join([item.strip() for item in list_match if item.strip()])
270
 
 
 
 
 
 
 
 
271
  return answer
272
 
273
  async def _generate_video_answer_from_question(self, question: str, video_id: str) -> str: