Scott Cogan committed
Commit cf5abb1 · 1 Parent(s): d541b87

requirements update for llm compat

Files changed (1): app.py +11 -2
app.py CHANGED
@@ -213,7 +213,8 @@ class BasicAgent:
         self.primary_llm = ChatGoogleGenerativeAI(
             model="gemini-2.5-flash-preview-05-20",
             max_tokens=8192,
-            temperature=0
+            temperature=0,
+            convert_system_message_to_human=True  # Enable system message conversion
         )
 
         # Initialize fallback LLM (if available)
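For context: convert_system_message_to_human=True tells langchain-google-genai to fold a leading SystemMessage into the first human turn, since Gemini's chat API has not always accepted a dedicated system role. A minimal standalone sketch of the changed initialization, assuming langchain-google-genai is installed and GOOGLE_API_KEY is set in the environment; the parameters mirror the hunk above:

from langchain_google_genai import ChatGoogleGenerativeAI

# Same settings as in the commit; the flag merges any SystemMessage
# into the first HumanMessage before the request is sent to Gemini.
primary_llm = ChatGoogleGenerativeAI(
    model="gemini-2.5-flash-preview-05-20",
    max_tokens=8192,
    temperature=0,
    convert_system_message_to_human=True,
)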
@@ -302,8 +303,15 @@ class BasicAgent:
 
         # Try primary LLM first
         try:
+            # Convert system message to human message for Gemini
+            if isinstance(self.sys_msg, SystemMessage):
+                system_content = f"System Instructions: {self.sys_msg.content}"
+                messages_with_system = [HumanMessage(content=system_content)] + messages
+            else:
+                messages_with_system = [self.sys_msg] + messages
+
             response = self.primary_llm.invoke(
-                [self.sys_msg] + messages,
+                messages_with_system,
                 tools=[{"type": "function", "function": {
                     "name": "google_search",
                     "description": "Search for information on the web",
@@ -326,6 +334,7 @@ class BasicAgent:
             logger.warning("Daily quota limit reached for primary LLM, trying fallback")
             if hasattr(self, 'fallback_llm') and self.fallback_llm is not None:
                 try:
+                    # For OpenAI, we can use the system message directly
                     response = self.fallback_llm.invoke(
                         [self.sys_msg] + messages,
                         tools=[{"type": "function", "function": {
 