Scott Cogan committed on
Commit
d445b1f
·
1 Parent(s): dd0227b

requirements update for llm compat

Browse files
Files changed (1) hide show
  1. app.py +20 -7
app.py CHANGED
@@ -329,19 +329,30 @@ class BasicAgent:
329
  }
330
 
331
  logger.info("Attempting to use primary LLM (Gemini)")
332
- response = self.primary_llm.invoke(
333
- messages_with_system,
334
- tools=[genai_tool]
335
- )
336
- logger.info("Successfully used primary LLM")
 
 
 
 
 
 
 
 
 
 
 
 
337
 
338
  except Exception as e:
339
  error_str = str(e)
340
  logger.error(f"Primary LLM error: {error_str}")
341
 
342
  # Check if we should try fallback
343
- if (hasattr(self, 'fallback_llm') and self.fallback_llm is not None and
344
- ("429" in error_str or "object" in error_str or "string" in error_str)):
345
  try:
346
  logger.info("Attempting to use fallback LLM (OpenAI)")
347
  # For OpenAI, we can use the system message directly
@@ -365,6 +376,8 @@ class BasicAgent:
365
  }
366
  }]
367
  )
 
 
368
  logger.info("Successfully used fallback LLM")
369
  except Exception as fallback_error:
370
  logger.error(f"Fallback LLM error: {str(fallback_error)}")
 
329
  }
330
 
331
  logger.info("Attempting to use primary LLM (Gemini)")
332
+ try:
333
+ response = self.primary_llm.invoke(
334
+ messages_with_system,
335
+ tools=[genai_tool]
336
+ )
337
+ if not response or not hasattr(response, 'content'):
338
+ raise ValueError("Invalid response format from Gemini")
339
+ logger.info("Successfully used primary LLM")
340
+ except Exception as e:
341
+ if "list index out of range" in str(e):
342
+ # Try without tools if tool configuration fails
343
+ response = self.primary_llm.invoke(messages_with_system)
344
+ if not response or not hasattr(response, 'content'):
345
+ raise ValueError("Invalid response format from Gemini")
346
+ logger.info("Successfully used primary LLM without tools")
347
+ else:
348
+ raise
349
 
350
  except Exception as e:
351
  error_str = str(e)
352
  logger.error(f"Primary LLM error: {error_str}")
353
 
354
  # Check if we should try fallback
355
+ if hasattr(self, 'fallback_llm') and self.fallback_llm is not None:
 
356
  try:
357
  logger.info("Attempting to use fallback LLM (OpenAI)")
358
  # For OpenAI, we can use the system message directly
 
376
  }
377
  }]
378
  )
379
+ if not response or not hasattr(response, 'content'):
380
+ raise ValueError("Invalid response format from fallback LLM")
381
  logger.info("Successfully used fallback LLM")
382
  except Exception as fallback_error:
383
  logger.error(f"Fallback LLM error: {str(fallback_error)}")