Spaces:
Runtime error
| import os | |
| from hf_api import HuggingFaceAPI | |
| from typing import Optional, List, Dict, Any | |
| from huggingface_hub import InferenceClient, HfApi | |
| from utils import load_settings | |
# Location of the per-app settings on disk, resolved relative to this file.
SETTINGS_DIR = os.path.join(os.path.dirname(__file__), 'settings')
APP_SETTINGS_FILE = os.path.join(SETTINGS_DIR, 'app.json')

# Pull the Hugging Face token out of the app settings and build the API client.
# NOTE(review): if 'hf_token' is missing, HF_TOKEN is None and the client is
# constructed without credentials — presumably intentional best-effort; verify.
_app_settings = load_settings(APP_SETTINGS_FILE)
HF_TOKEN = _app_settings.get('hf_token')
api = HuggingFaceAPI(HF_TOKEN)
def _check_models(header: str, model_ids: List[str], show_fallbacks: bool = False) -> None:
    """Print an availability report for each Hub model id under a section header.

    Args:
        header: Section title printed before the per-model results.
        model_ids: Hugging Face Hub model ids to probe via ``api.validate_model``.
        show_fallbacks: When True, also print up to two suggested fallback model
            ids for any model that is reported as not available.
    """
    print(header)
    for model in model_ids:
        try:
            result = api.validate_model(model)
            # NOTE(review): the 'β' glyphs look like mojibake for check/cross
            # marks — kept byte-for-byte; confirm the intended characters.
            print(f" {model}: {'β Available' if result['valid'] else 'β Not available'}")
            if show_fallbacks and not result['valid'] and 'fallback_models' in result:
                print(f" Fallbacks: {[m['id'] for m in result['fallback_models'][:2]]}")
        except Exception as e:
            # Truncate long provider error messages to keep the report readable.
            print(f" {model}: β Error - {str(e)[:50]}...")


print("Checking available models...")

# Sections 1-3 were three near-identical copy-pasted loops; consolidated into
# _check_models. Only section 2 printed fallbacks, hence show_fallbacks=True there.
_check_models(
    "\n1. Testing text generation models:",
    ["meta-llama/Llama-3.2-3B-Instruct", "microsoft/Phi-3-mini-4k-instruct"],
)
_check_models(
    "\n2. Testing translation models:",
    ["Helsinki-NLP/opus-mt-en-de", "Helsinki-NLP/opus-mt-en-fr"],
    show_fallbacks=True,
)
_check_models(
    "\n3. Testing Google models:",
    ["google/madlad400-3b-mt", "google/translategemma-12b-it"],
)
# 4. Smoke-test chat-based translation through a Llama instruct model.
print("\n4. Testing chat completion with Llama:")
try:
    messages = [{"role": "user", "content": "Translate 'Hello world' to French"}]
    response = api.chat_completion(
        model="meta-llama/Llama-3.2-3B-Instruct",
        messages=messages,
        max_tokens=100,
    )
    # Pull the assistant reply out of the OpenAI-style response payload.
    reply = response['choices'][0]['message']['content']
    print(f" β Chat translation works: {reply[:50]}...")
except Exception as e:
    # Truncate the error so a provider stack trace doesn't flood the report.
    print(f" β Chat translation failed: {str(e)[:50]}...")