Scott Cogan committed on
Commit
c17f0e1
·
1 Parent(s): cf5abb1

requirements update for llm compat

Browse files
Files changed (1) hide show
  1. app.py +32 -27
app.py CHANGED
@@ -310,22 +310,24 @@ class BasicAgent:
310
  else:
311
  messages_with_system = [self.sys_msg] + messages
312
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
313
  response = self.primary_llm.invoke(
314
  messages_with_system,
315
- tools=[{"type": "function", "function": {
316
- "name": "google_search",
317
- "description": "Search for information on the web",
318
- "parameters": {
319
- "type": "object",
320
- "properties": {
321
- "query": {
322
- "type": "string",
323
- "description": "The search query"
324
- }
325
- },
326
- "required": ["query"]
327
- }
328
- }}]
329
  )
330
  except Exception as e:
331
  error_str = str(e)
@@ -337,20 +339,23 @@ class BasicAgent:
337
  # For OpenAI, we can use the system message directly
338
  response = self.fallback_llm.invoke(
339
  [self.sys_msg] + messages,
340
- tools=[{"type": "function", "function": {
341
- "name": "google_search",
342
- "description": "Search for information on the web",
343
- "parameters": {
344
- "type": "object",
345
- "properties": {
346
- "query": {
347
- "type": "string",
348
- "description": "The search query"
349
- }
350
- },
351
- "required": ["query"]
 
 
 
352
  }
353
- }}]
354
  )
355
  logger.info("Successfully used fallback LLM")
356
  except Exception as fallback_error:
 
310
  else:
311
  messages_with_system = [self.sys_msg] + messages
312
 
313
+ # Define tools in Gemini's format
314
+ tools = [{
315
+ "name": "google_search",
316
+ "description": "Search for information on the web",
317
+ "parameters": {
318
+ "properties": {
319
+ "query": {
320
+ "description": "The search query",
321
+ "type": "string"
322
+ }
323
+ },
324
+ "required": ["query"]
325
+ }
326
+ }]
327
+
328
  response = self.primary_llm.invoke(
329
  messages_with_system,
330
+ tools=tools
 
 
 
 
 
 
 
 
 
 
 
 
 
331
  )
332
  except Exception as e:
333
  error_str = str(e)
 
339
  # For OpenAI, we can use the system message directly
340
  response = self.fallback_llm.invoke(
341
  [self.sys_msg] + messages,
342
+ tools=[{
343
+ "type": "function",
344
+ "function": {
345
+ "name": "google_search",
346
+ "description": "Search for information on the web",
347
+ "parameters": {
348
+ "type": "object",
349
+ "properties": {
350
+ "query": {
351
+ "type": "string",
352
+ "description": "The search query"
353
+ }
354
+ },
355
+ "required": ["query"]
356
+ }
357
  }
358
+ }]
359
  )
360
  logger.info("Successfully used fallback LLM")
361
  except Exception as fallback_error: