# NOTE(review): the three lines that were here ("Spaces:" / "Running" / "Running")
# were non-Python residue from a table/status extraction, not part of the script.
#!/usr/bin/env python3
"""
Test the Gemini tool calling configuration for document editor
"""
import os
import sys
import asyncio

from dotenv import load_dotenv

# Add parent directory to path so sibling project modules (agent_api, agents.*)
# resolve when this script is run directly from the tests directory.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from agent_api import LLMConfig
from langchain_openai import ChatOpenAI
from langchain_google_genai import ChatGoogleGenerativeAI

# Load environment variables; override=False keeps any values already
# exported in the shell, letting .env act only as a fallback.
load_dotenv(dotenv_path=".env", override=False)
async def test_llm_config():
    """Test the LLMConfig initialization and LLM types.

    Verifies, in order:
      1. Required environment variables are present (skips otherwise).
      2. LLMConfig exposes a ChatOpenAI and a ChatGoogleGenerativeAI instance.
      3. Both LLMs answer a trivial prompt.
      4. DocumentEditorAgent accepts both LLMs and binds its tools.

    Always returns None; progress and failures are reported via print.
    """
    print("=" * 80)
    print("[TEST] TESTING GEMINI TOOL CALLING CONFIGURATION")
    print("=" * 80)

    # Check environment configuration
    google_api_key = os.getenv("GOOGLE_API_KEY")
    gemini_model = os.getenv("GEMINI_TOOL_MODEL")
    openai_key = os.getenv("OPENAI_API_KEY")
    openai_model = os.getenv("LLM_MODEL")

    print("\nEnvironment Configuration:")
    print(f"   GOOGLE_API_KEY: {'[OK] Set' if google_api_key else '[X] Missing'}")
    print(f"   GEMINI_TOOL_MODEL: {gemini_model or '[X] Not set'}")
    print(f"   OPENAI_API_KEY: {'[OK] Set' if openai_key else '[X] Missing'}")
    print(f"   LLM_MODEL: {openai_model or '[X] Not set'}")

    # Bail out early (rather than fail) when keys are absent, so the script
    # is safe to run in environments without credentials.
    if not google_api_key or not openai_key:
        print("\n[WARN] Skipping test - required environment variables not configured")
        print("   Required: GOOGLE_API_KEY and OPENAI_API_KEY")
        return

    # Initialize LLMConfig
    print("\nInitializing LLMConfig...")
    try:
        llm_config = LLMConfig()
        print("[OK] LLMConfig initialized successfully")
    except Exception as e:
        print(f"[X] Failed to initialize LLMConfig: {str(e)}")
        import traceback
        traceback.print_exc()
        return

    # Check LLM types
    print("\nLLM Types:")
    print(f"   llm_config.openai_llm type: {type(llm_config.openai_llm).__name__}")
    print(f"   llm_config.gemini_llm type: {type(llm_config.gemini_llm).__name__}")

    # Verify correct types
    is_openai = isinstance(llm_config.openai_llm, ChatOpenAI)
    is_gemini = isinstance(llm_config.gemini_llm, ChatGoogleGenerativeAI)

    print("\nType Verification:")
    print(f"   OpenAI LLM is ChatOpenAI: {is_openai}")
    print(f"   Gemini LLM is ChatGoogleGenerativeAI: {is_gemini}")

    if not is_openai or not is_gemini:
        print("\n[X] Type verification failed!")
        return

    # Test simple invocation of both LLMs
    print("\n[TEST] Testing LLM invocation...")

    # Test OpenAI LLM
    try:
        print("\n   Testing OpenAI LLM...")
        response = await llm_config.openai_llm.ainvoke("Say 'OpenAI LLM works!'")
        print(f"   [OK] OpenAI LLM response: {response.content[:50]}...")
    except Exception as e:
        print(f"   [X] OpenAI LLM failed: {str(e)}")
        return

    # Test Gemini LLM
    try:
        print("\n   Testing Gemini LLM...")
        response = await llm_config.gemini_llm.ainvoke("Say 'Gemini LLM works!'")
        print(f"   [OK] Gemini LLM response: {response.content[:50]}...")
    except Exception as e:
        print(f"   [X] Gemini LLM failed: {str(e)}")
        return

    # Test DocumentEditorAgent initialization: summary generation uses the
    # OpenAI model, tool calling uses the Gemini model.
    print("\n[TEST] Testing DocumentEditorAgent initialization...")
    try:
        # Imported here (not at module top) so the earlier checks can run
        # even if the agents package has import-time problems.
        from agents.doc_editor import DocumentEditorAgent
        doc_editor = DocumentEditorAgent(
            llm=llm_config.openai_llm,
            llm_tool_calling=llm_config.gemini_llm
        )
        print("[OK] DocumentEditorAgent initialized successfully")
        print(f"   - Uses {type(doc_editor.llm).__name__} for summary")
        print(f"   - Uses {type(doc_editor.llm_tool_calling).__name__} for tool calling")

        # Verify tool binding
        print("\nTool binding check:")
        print(f"   - Tools count: {len(doc_editor.tools)}")
        print(f"   - Tool names: {[t.name for t in doc_editor.tools]}")
        print(f"   - Has llm_with_tools: {hasattr(doc_editor, 'llm_with_tools')}")
    except Exception as e:
        print(f"[X] DocumentEditorAgent initialization failed: {str(e)}")
        import traceback
        traceback.print_exc()
        return

    print("\n" + "=" * 80)
    print("[OK] ALL TESTS PASSED")
    print("=" * 80)
    print("\nSummary:")
    print("   - LLMConfig: [OK] Working")
    print("   - OpenAI LLM: [OK] Working")
    print("   - Gemini LLM: [OK] Working")
    print("   - DocumentEditorAgent: [OK] Initialized with both LLMs")
    print("   - Tool calling: [OK] Configured with Gemini")
    print("   - Summary generation: [OK] Configured with OpenAI")
if __name__ == "__main__":
    # Script entry point: drive the async test from a fresh event loop.
    asyncio.run(test_llm_config())