#!/usr/bin/env python3
"""Test the Gemini tool calling configuration for the document editor.

Runs a sequence of live checks: environment variables, LLMConfig
construction, LLM class types, a one-shot invocation of each LLM, and
DocumentEditorAgent initialization with both LLMs.
"""
import os
import sys
import asyncio

from dotenv import load_dotenv

# Make the project root importable when this script is run directly
# from its own directory.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from agent_api import LLMConfig
from langchain_openai import ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI

# Load environment variables; override=False keeps existing process env
# values ahead of anything in .env.
load_dotenv(dotenv_path=".env", override=False)


def _check_environment() -> bool:
    """Print the relevant environment configuration.

    Returns:
        True when both required API keys (GOOGLE_API_KEY, OPENAI_API_KEY)
        are present; False otherwise (a skip notice is printed).
    """
    google_api_key = os.getenv("GOOGLE_API_KEY")
    gemini_model = os.getenv("GEMINI_TOOL_MODEL")
    openai_key = os.getenv("OPENAI_API_KEY")
    openai_model = os.getenv("LLM_MODEL")

    print("\n📋 Environment Configuration:")
    print(f"   GOOGLE_API_KEY: {'✅ Set' if google_api_key else '❌ Missing'}")
    print(f"   GEMINI_TOOL_MODEL: {gemini_model or '❌ Not set'}")
    print(f"   OPENAI_API_KEY: {'✅ Set' if openai_key else '❌ Missing'}")
    print(f"   LLM_MODEL: {openai_model or '❌ Not set'}")

    if not google_api_key or not openai_key:
        print("\n⚠️ Skipping test - required environment variables not configured")
        print("   Required: GOOGLE_API_KEY and OPENAI_API_KEY")
        return False
    return True


def _init_llm_config():
    """Construct an LLMConfig.

    Returns:
        The LLMConfig instance on success, or None after printing a
        traceback on failure.
    """
    print("\n🚀 Initializing LLMConfig...")
    try:
        llm_config = LLMConfig()
        print("✅ LLMConfig initialized successfully")
        return llm_config
    except Exception as e:
        print(f"❌ Failed to initialize LLMConfig: {str(e)}")
        import traceback
        traceback.print_exc()
        return None


def _verify_llm_types(llm_config) -> bool:
    """Report the concrete LLM classes and confirm they are the expected ones.

    Returns:
        True when openai_llm is ChatOpenAI and gemini_llm is
        ChatGoogleGenerativeAI; False otherwise.
    """
    print("\n🔍 LLM Types:")
    print(f"   llm_config.openai_llm type: {type(llm_config.openai_llm).__name__}")
    print(f"   llm_config.gemini_llm type: {type(llm_config.gemini_llm).__name__}")

    is_openai = isinstance(llm_config.openai_llm, ChatOpenAI)
    is_gemini = isinstance(llm_config.gemini_llm, ChatGoogleGenerativeAI)

    print("\n✅ Type Verification:")
    print(f"   OpenAI LLM is ChatOpenAI: {is_openai}")
    print(f"   Gemini LLM is ChatGoogleGenerativeAI: {is_gemini}")

    if not is_openai or not is_gemini:
        print("\n❌ Type verification failed!")
        return False
    return True


async def _smoke_test_llm(label: str, llm, prompt: str) -> bool:
    """Invoke *llm* once with *prompt* and print the (truncated) response.

    Args:
        label: Human-readable LLM name used in the printed messages.
        llm: A chat model exposing an async ``ainvoke`` method.
        prompt: The prompt to send.

    Returns:
        True on a successful invocation, False if it raised.
    """
    try:
        print(f"\n   Testing {label}...")
        response = await llm.ainvoke(prompt)
        print(f"   ✅ {label} response: {response.content[:50]}...")
        return True
    except Exception as e:
        print(f"   ❌ {label} failed: {str(e)}")
        return False


def _test_doc_editor(llm_config) -> bool:
    """Build a DocumentEditorAgent with both LLMs and report its tool binding.

    Returns:
        True when construction succeeds, False after printing a traceback.
    """
    print("\n🧪 Testing DocumentEditorAgent initialization...")
    try:
        # Imported lazily so earlier stages can run even if the agent
        # package has import-time problems.
        from agents.doc_editor import DocumentEditorAgent
        doc_editor = DocumentEditorAgent(
            llm=llm_config.openai_llm,
            llm_tool_calling=llm_config.gemini_llm
        )
        print("✅ DocumentEditorAgent initialized successfully")
        print(f"   - Uses {type(doc_editor.llm).__name__} for summary")
        print(f"   - Uses {type(doc_editor.llm_tool_calling).__name__} for tool calling")

        print("\n🔍 Tool binding check:")
        print(f"   - Tools count: {len(doc_editor.tools)}")
        print(f"   - Tool names: {[t.name for t in doc_editor.tools]}")
        print(f"   - Has llm_with_tools: {hasattr(doc_editor, 'llm_with_tools')}")
        return True
    except Exception as e:
        print(f"❌ DocumentEditorAgent initialization failed: {str(e)}")
        import traceback
        traceback.print_exc()
        return False


async def test_llm_config():
    """Test the LLMConfig initialization and LLM types.

    Runs each check stage in order and stops at the first failure;
    prints a summary banner only when every stage passes.
    """
    print("=" * 80)
    print("🧪 TESTING GEMINI TOOL CALLING CONFIGURATION")
    print("=" * 80)

    if not _check_environment():
        return

    llm_config = _init_llm_config()
    if llm_config is None:
        return

    if not _verify_llm_types(llm_config):
        return

    print("\n🧪 Testing LLM invocation...")
    if not await _smoke_test_llm("OpenAI LLM", llm_config.openai_llm,
                                 "Say 'OpenAI LLM works!'"):
        return
    if not await _smoke_test_llm("Gemini LLM", llm_config.gemini_llm,
                                 "Say 'Gemini LLM works!'"):
        return

    if not _test_doc_editor(llm_config):
        return

    print("\n" + "=" * 80)
    print("✅ ALL TESTS PASSED")
    print("=" * 80)
    print("\n📊 Summary:")
    print("   - LLMConfig: ✅ Working")
    print("   - OpenAI LLM: ✅ Working")
    print("   - Gemini LLM: ✅ Working")
    print("   - DocumentEditorAgent: ✅ Initialized with both LLMs")
    print("   - Tool calling: ✅ Configured with Gemini")
    print("   - Summary generation: ✅ Configured with OpenAI")


if __name__ == "__main__":
    asyncio.run(test_llm_config())