# water3/agent/test_openrouter.py
#!/usr/bin/env python3
"""Test script to verify OpenRouter integration is working correctly."""
import asyncio
import os
import sys
# Load environment variables from .env file
from dotenv import load_dotenv
load_dotenv()
from litellm import acompletion
async def test_openrouter():
    """Probe OpenRouter connectivity by trying several known models.

    For each candidate model, performs one non-streaming and one streaming
    chat completion via litellm's ``acompletion``, stopping at the first
    model that passes both.

    Returns:
        True if any model succeeds; False when the API key is missing or
        every model fails. (Previously the missing-key path called
        ``sys.exit(1)``, which aborted the process before ``main()`` could
        print its summary — returning False keeps the boolean contract
        that ``main()`` relies on.)
    """
    api_key = os.environ.get("OPENROUTER_API_KEY")
    if not api_key:
        print("ERROR: OPENROUTER_API_KEY not set in environment!")
        return False

    # Reveal only a short prefix of the secret in logs (was 20 chars).
    print(f"OPENROUTER_API_KEY is set: {api_key[:8]}...")
    print(f"OPENROUTER_MODEL: {os.environ.get('OPENROUTER_MODEL', 'not set')}")

    # Candidate models, tried in order until one works.
    test_models = [
        "openrouter/anthropic/claude-3.5-sonnet",
        "openrouter/anthropic/claude-3-haiku",
        "openrouter/meta-llama/llama-3.3-70b-instruct",
    ]

    api_base = "https://openrouter.ai/api/v1"
    # Optional OpenRouter attribution headers (Referer / app title).
    extra_headers = {
        "HTTP-Referer": os.environ.get("OPENROUTER_REFERER", "https://localhost"),
        "X-Title": os.environ.get("OPENROUTER_APP_TITLE", "HF Agent Test"),
    }

    for model in test_models:
        print(f"\n{'='*60}")
        print(f"Testing model: {model}")
        print(f"{'='*60}")
        try:
            # Non-streaming completion.
            response = await acompletion(
                model=model,
                messages=[
                    {"role": "system", "content": "You are a helpful assistant."},
                    {"role": "user", "content": "Say 'OpenRouter is working!' and nothing else."}
                ],
                max_tokens=50,
                temperature=0.1,
                api_key=api_key,
                api_base=api_base,
                extra_headers=extra_headers,
            )
            content = response.choices[0].message.content
            print(f"✅ SUCCESS! Response: {content}")

            # Streaming completion against the same model.
            print(f"\nTesting streaming with {model}...")
            stream_response = await acompletion(
                model=model,
                messages=[
                    {"role": "system", "content": "You are a helpful assistant."},
                    {"role": "user", "content": "Say 'Streaming works!'"}
                ],
                max_tokens=30,
                temperature=0.1,
                api_key=api_key,
                api_base=api_base,
                extra_headers=extra_headers,
                stream=True,
            )
            streamed_content = ""
            async for chunk in stream_response:
                # Deltas can be empty/None; only accumulate real content.
                if chunk.choices and chunk.choices[0].delta.content:
                    streamed_content += chunk.choices[0].delta.content
            print(f"✅ STREAMING SUCCESS! Response: {streamed_content}")

            # Both checks passed for this model — stop here.
            print(f"\n{'='*60}")
            print(f"All tests passed for {model}!")
            print(f"{'='*60}")
            return True
        except Exception as e:
            # Report the failure and fall through to the next candidate.
            print(f"❌ FAILED with error: {type(e).__name__}: {e}")
            continue

    print("\n❌ All model tests failed!")
    return False
async def test_config_loading():
    """Verify that the agent config file loads and exposes OpenRouter fields.

    Returns:
        True when ``agent.config.load_config`` succeeds on
        ``configs/main_agent_config.json``; False on any failure
        (missing package, missing file, bad schema). Declared async only
        for symmetry with the other test coroutines.
    """
    print("\n" + "="*60)
    print("Testing config loading...")
    print("="*60)
    try:
        # Imported lazily so a missing `agent` package fails only this test.
        from agent.config import load_config
        config_path = os.path.join(os.path.dirname(__file__), "configs", "main_agent_config.json")
        config = load_config(config_path)
        # Plain string — was an f-string with no placeholders.
        print("✅ Config loaded successfully!")
        print(f"  model_name: {config.model_name}")
        print(f"  openrouter_enabled: {config.openrouter_enabled}")
        print(f"  openrouter_model: {config.openrouter_model}")
        return True
    except Exception as e:
        print(f"❌ Config loading failed: {e}")
        return False
async def main():
    """Run the config-loading and OpenRouter API tests and print a summary.

    Returns:
        Process exit code: 0 when both tests passed, 1 otherwise.
    """
    print("="*60)
    print("OpenRouter Integration Test Suite")
    print("="*60)

    # Run both checks even if the first fails, so the summary is complete.
    config_ok = await test_config_loading()
    api_ok = await test_openrouter()

    print("\n" + "="*60)
    print("TEST SUMMARY")
    print("="*60)
    print(f"Config loading: {'✅ PASS' if config_ok else '❌ FAIL'}")
    print(f"OpenRouter API: {'✅ PASS' if api_ok else '❌ FAIL'}")

    if config_ok and api_ok:
        print("\n🎉 All tests passed! OpenRouter is ready to use.")
        return 0
    else:
        print("\n⚠️ Some tests failed. Please check the errors above.")
        return 1
# Script entry point: run the async suite and propagate its exit code
# to the shell so CI can detect failure.
if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)