File size: 1,910 Bytes
7c07793
 
 
 
 
 
 
 
 
 
 
 
5a5b115
7c07793
 
 
 
ff520b7
 
7c07793
 
 
 
ff520b7
7c07793
 
 
 
ff520b7
7c07793
 
 
 
ff520b7
7c07793
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5a5b115
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
import os
from dotenv import load_dotenv
from agents import AsyncOpenAI, OpenAIChatCompletionsModel

# Load variables from .env at import time; override=True lets .env values
# replace any variables already present in the process environment.
load_dotenv(override=True)


def get_model(model_choice: str):
    """Build an OpenAIChatCompletionsModel for the selected radio choice.

    Args:
        model_choice: Label from the UI radio (e.g. "Gemini 2.5 Flash").
            Matching is by substring, so any label containing one of the
            known tokens ("2.5 Flash", "2.0 Pro", "2.0 Flash",
            "Llama 3.3") resolves; anything else falls back to the
            default Gemini model.

    Returns:
        OpenAIChatCompletionsModel configured with the matching model
        name, API key, and base URL.
    """
    gemini_base = "https://generativelanguage.googleapis.com/v1beta/openai/"

    # One row per radio option: (label token, model name, API-key env var,
    # OpenAI-compatible base URL). Checked in order; first match wins.
    candidates = (
        ("2.5 Flash", "gemini-2.5-flash", "GEMINI_API_KEY", gemini_base),
        # NOTE(review): the "2.0 Pro" label maps to an experimental
        # *flash-thinking* model name — confirm this is intentional.
        ("2.0 Pro", "gemini-2.0-flash-thinking-exp-01-21", "GEMINI_API_KEY", gemini_base),
        ("2.0 Flash", "gemini-2.0-flash-exp", "GEMINI_API_KEY", gemini_base),
        ("Llama 3.3", "llama-3.3-70b-versatile", "GROQ_API_KEY", "https://api.groq.com/openai/v1"),
    )

    # Default to gemini-2.5-flash when no token matches.
    model_name, key_env, base_url = "gemini-2.5-flash", "GEMINI_API_KEY", gemini_base
    for token, name, env, url in candidates:
        if token in model_choice:
            model_name, key_env, base_url = name, env, url
            break

    # Single construction path replaces the four duplicated branches.
    client = AsyncOpenAI(
        api_key=os.getenv(key_env),
        base_url=base_url
    )

    return OpenAIChatCompletionsModel(
        model=model_name,
        openai_client=client
    )


def get_model_display_name(model_choice: str) -> str:
    """Return the presentation label for *model_choice*.

    Radio labels are already human-readable, so this is the identity
    mapping — the value comes back unchanged.
    """
    display_name = model_choice
    return display_name