File size: 2,253 Bytes
621ec47
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8a2ad32
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
import os
from hf_api import HuggingFaceAPI
from typing import Optional, List, Dict, Any
from huggingface_hub import InferenceClient, HfApi
from utils import load_settings
    
# Where persisted settings live, relative to this module.
SETTINGS_DIR = os.path.join(os.path.dirname(__file__), 'settings')
APP_SETTINGS_FILE = os.path.join(SETTINGS_DIR, 'app.json')

# Load the app settings once and pull the Hugging Face token out of them
# (None when the key is absent — HuggingFaceAPI decides how to handle that).
_app_settings = load_settings(APP_SETTINGS_FILE)
HF_TOKEN = _app_settings.get('hf_token')
api = HuggingFaceAPI(HF_TOKEN)

print("Checking available models...")


def _report_model_status(heading, model_ids, show_fallbacks=False):
    """Print an availability line for each model id under *heading*.

    For every id in *model_ids*, calls ``api.validate_model`` and prints a
    one-line βœ…/❌ status. When *show_fallbacks* is true and a model is not
    available, also prints up to two suggested fallback model ids (when the
    validation result carries a ``fallback_models`` list).

    Errors from individual models are caught and reported inline (truncated
    to 50 chars) so one bad model does not abort the whole check.
    """
    print(heading)
    for model in model_ids:
        try:
            result = api.validate_model(model)
            print(f"   {model}: {'βœ… Available' if result['valid'] else '❌ Not available'}")
            # Translation checks also surface alternative models to try.
            if show_fallbacks and not result['valid'] and 'fallback_models' in result:
                print(f"      Fallbacks: {[m['id'] for m in result['fallback_models'][:2]]}")
        except Exception as e:
            # Best-effort diagnostics: keep going, truncate noisy error text.
            print(f"   {model}: ❌ Error - {str(e)[:50]}...")


_report_model_status(
    "\n1. Testing text generation models:",
    ["meta-llama/Llama-3.2-3B-Instruct", "microsoft/Phi-3-mini-4k-instruct"],
)
_report_model_status(
    "\n2. Testing translation models:",
    ["Helsinki-NLP/opus-mt-en-de", "Helsinki-NLP/opus-mt-en-fr"],
    show_fallbacks=True,
)
_report_model_status(
    "\n3. Testing Google models:",
    ["google/madlad400-3b-mt", "google/translategemma-12b-it"],
)

# Smoke-test chat-based translation through the Llama instruct model.
print("\n4. Testing chat completion with Llama:")
chat_messages = [{"role": "user", "content": "Translate 'Hello world' to French"}]
try:
    response = api.chat_completion(
        model="meta-llama/Llama-3.2-3B-Instruct",
        messages=chat_messages,
        max_tokens=100
    )
    # Show only the first 50 chars of the reply to keep output compact.
    snippet = response['choices'][0]['message']['content'][:50]
    print(f"   βœ… Chat translation works: {snippet}...")
except Exception as e:
    # Report the failure (truncated) instead of crashing the script.
    print(f"   ❌ Chat translation failed: {str(e)[:50]}...")