|
|
|
|
|
""" |
|
|
Script to check available Google models on OpenRouter |
|
|
""" |
|
|
|
|
|
import os |
|
|
import asyncio |
|
|
import aiohttp |
|
|
import json |
|
|
|
|
|
async def check_available_models():
    """Query OpenRouter's model catalog and report available Google models.

    Reads the API key from the OPENROUTER_API_KEY environment variable,
    fetches the public /models endpoint, prints id/name/context/pricing for
    every model whose id contains "google", and highlights up to three
    Gemini models recommended for translation. Purely diagnostic: prints
    results to stdout and returns None.
    """
    print("🔍 Checking available models on OpenRouter...")

    api_key = os.getenv("OPENROUTER_API_KEY")
    if not api_key:
        print("❌ OPENROUTER_API_KEY environment variable not set!")
        return

    # Show only a short prefix so the full key is never echoed to logs.
    print(f"✅ Using API key: {api_key[:10]}...")

    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        # Optional OpenRouter attribution headers.
        "HTTP-Referer": "https://huggingface.co",
        "X-Title": "Document Translator",
    }

    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://openrouter.ai/api/v1/models",
                headers=headers,
            ) as response:
                if response.status != 200:
                    error_text = await response.text()
                    print(f"❌ API error: {response.status} - {error_text}")
                    return

                data = await response.json()
                models = data.get("data", [])
                print(f"📊 Found {len(models)} total models")

                google_models = [
                    model for model in models
                    if "google" in model["id"].lower()
                ]

                print(f"🔍 Found {len(google_models)} Google models:")
                for model in google_models:
                    pricing = model.get("pricing", {})
                    prompt_price = pricing.get("prompt", "N/A")
                    completion_price = pricing.get("completion", "N/A")
                    print(f"  • {model['id']}")
                    print(f"    Name: {model.get('name', 'N/A')}")
                    print(f"    Context: {model.get('context_length', 'N/A')} tokens")
                    print(f"    Pricing: ${prompt_price}/M input, ${completion_price}/M output")
                    print()

                gemini_models = [
                    model for model in google_models
                    if "gemini" in model["id"].lower()
                ]

                if gemini_models:
                    print("🎯 Recommended Gemini models for translation:")
                    for model in gemini_models[:3]:
                        print(f"  ✅ {model['id']} - {model.get('name', 'Google Gemini')}")
                    print(f"\n🔧 Update your translator.py to use: {gemini_models[0]['id']}")
                else:
                    print("⚠️ No Gemini models found. Consider using other Google models:")
                    for model in google_models[:3]:
                        print(f"  • {model['id']}")
    except Exception as e:
        # Best-effort diagnostic script: report the failure instead of crashing.
        print(f"❌ Error checking models: {e}")
|
|
|
|
|
async def test_specific_model(model_id: str) -> bool:
    """Probe *model_id* on OpenRouter with a minimal translation request.

    Sends a tiny chat-completions request ("Hello world" → Arabic) and
    prints the result.

    Returns:
        True if the request succeeded (HTTP 200 with a choices payload),
        False on any HTTP error or exception.
    """
    print(f"\n🧪 Testing model: {model_id}")

    api_key = os.getenv("OPENROUTER_API_KEY")
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
        # Optional OpenRouter attribution headers.
        "HTTP-Referer": "https://huggingface.co",
        "X-Title": "Document Translator",
    }

    test_payload = {
        "model": model_id,
        "messages": [
            {"role": "system", "content": "You are a professional translator."},
            {"role": "user", "content": "Translate 'Hello world' to Arabic"},
        ],
        # Keep the probe cheap and (near-)deterministic.
        "max_tokens": 50,
        "temperature": 0.1,
    }

    try:
        async with aiohttp.ClientSession() as session:
            async with session.post(
                "https://openrouter.ai/api/v1/chat/completions",
                headers=headers,
                json=test_payload,
            ) as response:
                if response.status == 200:
                    data = await response.json()
                    result = data["choices"][0]["message"]["content"]
                    print(f"✅ Model works! Translation: {result}")
                    return True
                error_text = await response.text()
                print(f"❌ Model test failed: {response.status} - {error_text}")
                return False
    except Exception as e:
        # Network/parse failures count as a failed probe, not a crash.
        print(f"❌ Test error: {e}")
        return False
|
|
|
|
|
async def main():
    """Entry point: list Google models, then probe a few common Gemini ids.

    First prints the full catalog of Google models available on OpenRouter,
    then sequentially tests a hard-coded shortlist of Gemini model ids and
    reports which ones respond successfully.
    """
    print("🚀 OpenRouter Model Checker\n")

    await check_available_models()

    # Common Google/Gemini ids to probe directly; ids change over time,
    # so several historical spellings are tried.
    test_models = [
        "google/gemini-pro",
        "google/gemini-1.5-pro",
        "google/gemini-flash-1.5",
        "google/gemini-pro-1.5",
    ]

    print("\n🧪 Testing common Google models...")
    working_models = []

    # Sequential on purpose: keeps output readable and avoids rate limits.
    for model in test_models:
        if await test_specific_model(model):
            working_models.append(model)

    if working_models:
        print("\n🎉 Working models found:")
        for model in working_models:
            print(f"  ✅ {model}")
        print(f"\n💡 Recommended: Update your code to use '{working_models[0]}'")
    else:
        print("\n⚠️ No working Google models found. Check your API key or try other providers.")
|
|
|
|
|
# Run the checker only when executed as a script (not when imported).
if __name__ == "__main__":
    asyncio.run(main())