# NOTE: extracted from a rendered Hugging Face Space page; the original capture
# began with "Spaces:" followed by two "Runtime error" lines — those were page
# rendering artifacts, not part of this configuration.
type: API
client_name: openai
model_name: ${model.${model.client_name}.model}
openai:
  model: "gpt-4o-mini"
  api_key: ${oc.env:OPENAI_API_KEY}
  available_models:
    # 0.000425, fixed,
    - gpt-4o-mini
    # 0.000213, fixed
    - gpt-4o
    # 0.000098, max
    - gpt-4.1-nano
    # 0.000259, max
    - gpt-4.1-mini
    # 0.00017, fixed
    - gpt-4.1
together:
  model: "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo"
  api_key: ${oc.env:TOGETHER_API_KEY}
  available_models:
    - "meta-llama/Llama-Vision-Free"
    - "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo"
    - "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo"
    # moderation models
    - "meta-llama/Llama-Guard-3-11B-Vision-Turbo"
groq:
  client:
    _target_: groq.Groq
  async_client:
    _target_: groq.AsyncGroq
  api_key: ${oc.env:GROQ_API_KEY}
  model: "meta-llama/llama-4-scout-17b-16e-instruct"
  available_models:
    - "meta-llama/llama-4-scout-17b-16e-instruct"