# Extraction metadata (not part of the script): file size 5,776 bytes, blob 1305a78, 155 lines.
#!/usr/bin/env python3
"""
Script to check available Google models on OpenRouter
"""
import os
import asyncio
import aiohttp
import json
async def check_available_models():
    """Query OpenRouter's /models endpoint and report available Google models.

    Reads the API key from the ``OPENROUTER_API_KEY`` environment variable,
    lists every Google-owned model with name/context/pricing, then highlights
    Gemini models suitable for translation. Diagnostic only; returns None.
    """
    print("Checking available models on OpenRouter...")

    api_key = os.getenv("OPENROUTER_API_KEY")
    if not api_key:
        print("ERROR: OPENROUTER_API_KEY environment variable not set!")
        return

    # Only show a short prefix so the key is never fully logged.
    print(f"Using API key: {api_key[:10]}...")

    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        "HTTP-Referer": "https://huggingface.co",
        "X-Title": "Document Translator",
    }

    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://openrouter.ai/api/v1/models",
                headers=headers,
            ) as response:
                if response.status == 200:
                    data = await response.json()
                    models = data.get("data", [])
                    print(f"Found {len(models)} total models")

                    # Google models carry the provider prefix in their id
                    # (e.g. "google/gemini-pro"). Use .get() so a malformed
                    # entry without an "id" cannot raise KeyError.
                    google_models = [
                        model for model in models
                        if "google" in model.get("id", "").lower()
                    ]
                    print(f"Found {len(google_models)} Google models:")
                    for model in google_models:
                        pricing = model.get("pricing", {})
                        prompt_price = pricing.get("prompt", "N/A")
                        completion_price = pricing.get("completion", "N/A")
                        print(f"  * {model['id']}")
                        print(f"    Name: {model.get('name', 'N/A')}")
                        print(f"    Context: {model.get('context_length', 'N/A')} tokens")
                        print(f"    Pricing: ${prompt_price}/M input, ${completion_price}/M output")
                        print()

                    # Narrow further to the Gemini family, the preferred choice.
                    gemini_models = [
                        model for model in google_models
                        if "gemini" in model.get("id", "").lower()
                    ]
                    if gemini_models:
                        print("Recommended Gemini models for translation:")
                        for model in gemini_models[:3]:  # Show top 3
                            print(f"  - {model['id']} - {model.get('name', 'Google Gemini')}")
                        print(f"\nUpdate your translator.py to use: {gemini_models[0]['id']}")
                    else:
                        print("WARNING: No Gemini models found. Consider using other Google models:")
                        for model in google_models[:3]:
                            print(f"  * {model['id']}")
                else:
                    error_text = await response.text()
                    print(f"API error: {response.status} - {error_text}")
    except Exception as e:
        # Best-effort diagnostic: report and continue rather than crash.
        print(f"Error checking models: {e}")
async def test_specific_model(model_id: str) -> bool:
    """Probe one OpenRouter model with a tiny translation request.

    Sends a minimal chat-completions request asking the model to translate
    "Hello world" to Arabic.

    Args:
        model_id: OpenRouter model identifier, e.g. "google/gemini-pro".

    Returns:
        True if the request succeeded (HTTP 200), False on any API error,
        network failure, or missing API key.
    """
    print(f"\nTesting model: {model_id}")

    api_key = os.getenv("OPENROUTER_API_KEY")
    if not api_key:
        # Fail fast instead of sending a request with "Bearer None".
        print("ERROR: OPENROUTER_API_KEY environment variable not set!")
        return False

    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        "HTTP-Referer": "https://huggingface.co",
        "X-Title": "Document Translator",
    }
    # Tiny, cheap payload: low max_tokens and near-deterministic temperature.
    test_payload = {
        "model": model_id,
        "messages": [
            {"role": "system", "content": "You are a professional translator."},
            {"role": "user", "content": "Translate 'Hello world' to Arabic"}
        ],
        "max_tokens": 50,
        "temperature": 0.1
    }

    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(
                "https://openrouter.ai/api/v1/chat/completions",
                headers=headers,
                json=test_payload,
            ) as response:
                if response.status == 200:
                    data = await response.json()
                    result = data["choices"][0]["message"]["content"]
                    print(f"Model works! Translation: {result}")
                    return True
                else:
                    error_text = await response.text()
                    print(f"Model test failed: {response.status} - {error_text}")
                    return False
    except Exception as e:
        # Network errors or an unexpected response shape both count as failure.
        print(f"Test error: {e}")
        return False
async def main():
    """List available models, then probe common Google models for usability."""
    print("OpenRouter Model Checker\n")

    # First, dump everything Google offers on OpenRouter.
    await check_available_models()

    # Candidate model ids to probe with a real (tiny) request.
    test_models = [
        "google/gemini-pro",
        "google/gemini-1.5-pro",
        "google/gemini-flash-1.5",
        "google/gemini-pro-1.5",
    ]
    print("\nTesting common Google models...")

    working_models = []
    for model in test_models:
        # Probes run sequentially to keep output readable and avoid rate limits.
        if await test_specific_model(model):
            working_models.append(model)

    if working_models:
        print("\nWorking models found:")
        for model in working_models:
            print(f"  - {model}")
        print(f"\nRecommended: Update your code to use '{working_models[0]}'")
    else:
        print("\nWARNING: No working Google models found. Check your API key or try other providers.")
# Script entry point: run the async checker on a fresh event loop.
# (Removed a stray trailing "|" extraction artifact that broke parsing.)
if __name__ == "__main__":
    asyncio.run(main())