bstraehle committed on
Commit
3320f03
Β·
verified Β·
1 Parent(s): 4638b66

Update agents/tools/ai_tools.py

Browse files
Files changed (1) hide show
  1. agents/tools/ai_tools.py +38 -0
agents/tools/ai_tools.py CHANGED
@@ -491,6 +491,44 @@ class AITools():
491
  print(f"πŸ€– AITools: algebraic_notation_tool: result={str(e)}")
492
  raise RuntimeError(f"Processing failed: {str(e)}")
493
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
494
  def get_final_answer(question, answer):
495
  print(f"πŸ€– AITools: get_final_answer: question={question}, answer={answer}")
496
 
 
491
  print(f"πŸ€– AITools: algebraic_notation_tool: result={str(e)}")
492
  raise RuntimeError(f"Processing failed: {str(e)}")
493
 
494
+ @tool("Final Answer Tool")
495
+ def final_answer_tool(question: str, answer: str) -> str:
496
+ """Given a question and initial answer, get the final answer.
497
+
498
+ Args:
499
+ question (str): The question
500
+ answer (str): The initial answer
501
+
502
+ Returns:
503
+ str: Final answer
504
+
505
+ Raises:
506
+ RuntimeError: If processing fails
507
+ """
508
+ print(f"πŸ€– AITools: get_final_answer: question={question}, answer={answer}")
509
+
510
+ try:
511
+ client = AITools._get_client()
512
+
513
+ prompt = PROMPT_FINAL_ANSWER.format(question=question, answer=answer)
514
+
515
+ response = client.models.generate_content(
516
+ model=LLM_FINAL_ANSWER,
517
+ contents=[prompt],
518
+ config=types.GenerateContentConfig(
519
+ thinking_config=types.ThinkingConfig(
520
+ thinking_level=THINKING_LEVEL_FINAL_ANSWER
521
+ )
522
+ )
523
+ )
524
+
525
+ result = response.text.strip()
526
+ print(f"πŸ€– AITools: get_final_answer: result={result}")
527
+ return result
528
+ except Exception as e:
529
+ print(f"πŸ€– AITools: get_final_answer: result={str(e)}")
530
+ raise RuntimeError(f"Processing failed: {str(e)}")
531
+
532
  def get_final_answer(question, answer):
533
  print(f"πŸ€– AITools: get_final_answer: question={question}, answer={answer}")
534