Scott Cogan committed on
Commit
e1c8ab0
·
1 Parent(s): 8cfdb1c

tool call improvements

Browse files
Files changed (1) hide show
  1. app.py +62 -47
app.py CHANGED
@@ -225,7 +225,7 @@ class BasicAgent:
225
  model="gemini-2.5-flash-preview-05-20",
226
  max_tokens=8192,
227
  temperature=0,
228
- convert_system_message_to_human=True # Enable system message conversion
229
  )
230
 
231
  # Create tool executor
@@ -240,7 +240,43 @@ class BasicAgent:
240
  "google_search": google_search
241
  }
242
 
243
- # System message
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
244
  self.sys_msg = SystemMessage('''You are a general AI assistant. I will ask you a question. Follow these steps:
245
 
246
  1. First, use the google_search tool to find relevant information about the question.
@@ -260,6 +296,7 @@ class BasicAgent:
260
 
261
  IMPORTANT:
262
  - You MUST use the google_search tool before providing your final answer
 
263
  - Your final answer should ONLY be the requested information, no explanations
264
  - If you need to search again, use the tool again
265
  - Do not provide detailed analysis in your final answer
@@ -308,26 +345,15 @@ class BasicAgent:
308
  raise ValueError("Primary LLM not initialized")
309
 
310
  logger.info("Attempting to use primary LLM (OpenAI)")
311
- # For OpenAI, we can use the system message directly
 
 
 
 
 
312
  response = self.primary_llm.invoke(
313
- [self.sys_msg] + messages,
314
- tools=[{
315
- "type": "function",
316
- "function": {
317
- "name": "google_search",
318
- "description": "Search for information on the web",
319
- "parameters": {
320
- "type": "object",
321
- "properties": {
322
- "query": {
323
- "type": "string",
324
- "description": "The search query"
325
- }
326
- },
327
- "required": ["query"]
328
- }
329
- }
330
- }]
331
  )
332
 
333
  if not response or not hasattr(response, 'content'):
@@ -339,7 +365,7 @@ class BasicAgent:
339
  return {"messages": [response], "next": "tools"}
340
  else:
341
  # If no tool call, try without tools
342
- response = self.primary_llm.invoke([self.sys_msg] + messages)
343
  if not response or not hasattr(response, 'content'):
344
  raise ValueError("Invalid response format from OpenAI")
345
  logger.info("Successfully used primary LLM without tools")
@@ -358,27 +384,14 @@ class BasicAgent:
358
  else:
359
  messages_with_system = [self.sys_msg] + messages
360
 
361
- # Create tool configuration for Gemini
362
- genai_tool = {
363
- "function_declarations": [{
364
- "name": "google_search",
365
- "description": "Search for information on the web",
366
- "parameters": {
367
- "type": "object",
368
- "properties": {
369
- "query": {
370
- "type": "string",
371
- "description": "The search query"
372
- }
373
- },
374
- "required": ["query"]
375
- }
376
- }]
377
- }
378
 
379
  response = self.fallback_llm.invoke(
380
- messages_with_system,
381
- tools=[genai_tool]
382
  )
383
 
384
  if not response or not hasattr(response, 'content'):
@@ -390,7 +403,7 @@ class BasicAgent:
390
  return {"messages": [response], "next": "tools"}
391
  else:
392
  # If no tool call, try without tools
393
- response = self.fallback_llm.invoke(messages_with_system)
394
  if not response or not hasattr(response, 'content'):
395
  raise ValueError("Invalid response format from Gemini")
396
  logger.info("Successfully used fallback LLM without tools")
@@ -422,10 +435,7 @@ class BasicAgent:
422
  if any(phrase in content.lower() for phrase in ["let me", "i'll", "i will", "sure", "okay", "alright"]):
423
  logger.info("Model provided acknowledgment instead of tool call, prompting for search")
424
  return {
425
- "messages": messages + [
426
- AIMessage(content="Please use the google_search tool to find the information."),
427
- HumanMessage(content="Please search for the information using the google_search tool.")
428
- ],
429
  "next": "agent"
430
  }
431
 
@@ -489,7 +499,13 @@ class BasicAgent:
489
  result = execute_tool(tool_name, tool_args, self.tools)
490
  logger.info(f"Tool result: {result}")
491
 
 
492
  messages.append(AIMessage(content=f"Tool result: {result}"))
 
 
 
 
 
493
  except Exception as e:
494
  logger.error(f"Error executing tool {tool_name}: {str(e)}")
495
  messages.append(AIMessage(content=f"Tool error: {str(e)}"))
@@ -499,7 +515,6 @@ class BasicAgent:
499
  if any(phrase in content for phrase in ["let me", "i'll", "i will", "sure", "okay", "alright"]):
500
  logger.info("No tool calls found, prompting for search")
501
  messages.append(AIMessage(content="Please use the google_search tool to find the information."))
502
- messages.append(HumanMessage(content="Please search for the information using the google_search tool."))
503
  else:
504
  logger.info("No tool calls found in AI message")
505
  # If the message looks like a final answer, return it
 
225
  model="gemini-2.5-flash-preview-05-20",
226
  max_tokens=8192,
227
  temperature=0,
228
+ convert_system_message_to_human=True
229
  )
230
 
231
  # Create tool executor
 
240
  "google_search": google_search
241
  }
242
 
243
+ # Define tool configurations
244
+ self.openai_tools = [{
245
+ "type": "function",
246
+ "function": {
247
+ "name": "google_search",
248
+ "description": "Search for information on the web. Use this tool to find specific information about the question.",
249
+ "parameters": {
250
+ "type": "object",
251
+ "properties": {
252
+ "query": {
253
+ "type": "string",
254
+ "description": "The search query to find relevant information"
255
+ }
256
+ },
257
+ "required": ["query"]
258
+ }
259
+ }
260
+ }]
261
+
262
+ self.gemini_tools = [{
263
+ "function_declarations": [{
264
+ "name": "google_search",
265
+ "description": "Search for information on the web. Use this tool to find specific information about the question.",
266
+ "parameters": {
267
+ "type": "object",
268
+ "properties": {
269
+ "query": {
270
+ "type": "string",
271
+ "description": "The search query to find relevant information"
272
+ }
273
+ },
274
+ "required": ["query"]
275
+ }
276
+ }]
277
+ }]
278
+
279
+ # System message with explicit tool usage instructions
280
  self.sys_msg = SystemMessage('''You are a general AI assistant. I will ask you a question. Follow these steps:
281
 
282
  1. First, use the google_search tool to find relevant information about the question.
 
296
 
297
  IMPORTANT:
298
  - You MUST use the google_search tool before providing your final answer
299
+ - Format your tool calls as: {"name": "google_search", "arguments": {"query": "your search query"}}
300
  - Your final answer should ONLY be the requested information, no explanations
301
  - If you need to search again, use the tool again
302
  - Do not provide detailed analysis in your final answer
 
345
  raise ValueError("Primary LLM not initialized")
346
 
347
  logger.info("Attempting to use primary LLM (OpenAI)")
348
+
349
+ # Add explicit tool usage prompt
350
+ messages_with_tool_prompt = [self.sys_msg] + messages + [
351
+ HumanMessage(content="Use the google_search tool to find the information. Format your response as a tool call.")
352
+ ]
353
+
354
  response = self.primary_llm.invoke(
355
+ messages_with_tool_prompt,
356
+ tools=self.openai_tools
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
357
  )
358
 
359
  if not response or not hasattr(response, 'content'):
 
365
  return {"messages": [response], "next": "tools"}
366
  else:
367
  # If no tool call, try without tools
368
+ response = self.primary_llm.invoke(messages_with_tool_prompt)
369
  if not response or not hasattr(response, 'content'):
370
  raise ValueError("Invalid response format from OpenAI")
371
  logger.info("Successfully used primary LLM without tools")
 
384
  else:
385
  messages_with_system = [self.sys_msg] + messages
386
 
387
+ # Add explicit tool usage prompt
388
+ messages_with_tool_prompt = messages_with_system + [
389
+ HumanMessage(content="Use the google_search tool to find the information. Format your response as a tool call.")
390
+ ]
 
 
 
 
 
 
 
 
 
 
 
 
 
391
 
392
  response = self.fallback_llm.invoke(
393
+ messages_with_tool_prompt,
394
+ tools=self.gemini_tools
395
  )
396
 
397
  if not response or not hasattr(response, 'content'):
 
403
  return {"messages": [response], "next": "tools"}
404
  else:
405
  # If no tool call, try without tools
406
+ response = self.fallback_llm.invoke(messages_with_tool_prompt)
407
  if not response or not hasattr(response, 'content'):
408
  raise ValueError("Invalid response format from Gemini")
409
  logger.info("Successfully used fallback LLM without tools")
 
435
  if any(phrase in content.lower() for phrase in ["let me", "i'll", "i will", "sure", "okay", "alright"]):
436
  logger.info("Model provided acknowledgment instead of tool call, prompting for search")
437
  return {
438
+ "messages": [AIMessage(content="Please use the google_search tool to find the information.")],
 
 
 
439
  "next": "agent"
440
  }
441
 
 
499
  result = execute_tool(tool_name, tool_args, self.tools)
500
  logger.info(f"Tool result: {result}")
501
 
502
+ # Add tool result to messages
503
  messages.append(AIMessage(content=f"Tool result: {result}"))
504
+
505
+ # If this was a google_search, analyze the results
506
+ if tool_name == "google_search":
507
+ # Add a prompt to analyze the search results
508
+ messages.append(HumanMessage(content="Based on the search results, please provide your final answer."))
509
  except Exception as e:
510
  logger.error(f"Error executing tool {tool_name}: {str(e)}")
511
  messages.append(AIMessage(content=f"Tool error: {str(e)}"))
 
515
  if any(phrase in content for phrase in ["let me", "i'll", "i will", "sure", "okay", "alright"]):
516
  logger.info("No tool calls found, prompting for search")
517
  messages.append(AIMessage(content="Please use the google_search tool to find the information."))
 
518
  else:
519
  logger.info("No tool calls found in AI message")
520
  # If the message looks like a final answer, return it