File size: 1,495 Bytes
330b6e4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/usr/bin/env python3
"""

Check which Groq models are currently available.

"""

import os
from dotenv import load_dotenv
from groq import Groq

# Pull GROQ_API_KEY (and any other variables) from a local .env file into
# the process environment before anything reads os.getenv below.
load_dotenv()

def test_models():
    """Probe a list of Groq chat models and report which ones respond.

    Sends a minimal one-token prompt to each candidate model, prints a
    per-model pass/fail line, then a summary with a recommendation.

    Returns:
        list[str]: Model IDs that returned at least one choice. Empty when
        no API key is configured or no model responds. (Previously the
        result was only printed; returning it makes the probe reusable.)
    """
    api_key = os.getenv('GROQ_API_KEY')
    if not api_key:
        print("No API key found")
        return []  # explicit empty result instead of implicit None

    client = Groq(api_key=api_key)

    # Candidate model IDs. Groq retires/renames models over time, so this
    # list is best-effort — TODO keep in sync with the Groq model docs.
    models_to_test = [
        'llama-3.1-70b-versatile',
        'llama-3.1-8b-instant',
        'llama3-70b-8192',
        'llama3-8b-8192',
        'mixtral-8x7b-32768',
        'gemma-7b-it',
        'gemma2-9b-it',
    ]

    working_models = []

    for model in models_to_test:
        try:
            print(f"Testing {model}...")
            # Cheapest possible request: one short user message, capped
            # at 10 completion tokens.
            response = client.chat.completions.create(
                messages=[{"role": "user", "content": "Hi"}],
                model=model,
                max_tokens=10
            )

            if response.choices:
                print(f"✅ {model} works!")
                working_models.append(model)
            else:
                print(f"❌ {model} - no response")

        except Exception as e:
            # Broad catch is deliberate: any failure (model decommissioned,
            # auth error, rate limit) just marks this model as unusable.
            print(f"❌ {model} - {str(e)[:100]}...")

    print(f"\nWorking models: {working_models}")
    if working_models:
        print(f"Recommended: {working_models[0]}")
    return working_models

# Allow running this file directly as a quick model-availability check.
if __name__ == "__main__":
    test_models()