{
  "Kwai": {
    "vendor": "Kwai",
    "total_models": 1,
    "free_models": 1,
    "paid_models": 0,
    "model_names": [
      "Kwaipilot: Kat Coder (free)"
    ],
    "input_pricing": null,
    "output_pricing": null,
    "context_lengths": {
      "count": 1,
      "min": 256000,
      "max": 256000,
      "mean": 256000,
      "median": 256000
    }
  },
  "Moonshot AI": {
    "vendor": "Moonshot AI",
    "total_models": 7,
    "free_models": 1,
    "paid_models": 6,
    "model_names": [
      "MoonshotAI: Kimi Linear 48B A3B Instruct",
      "MoonshotAI: Kimi K2 Thinking",
      "MoonshotAI: Kimi K2 0905",
      "MoonshotAI: Kimi K2 0905 (exacto)",
      "MoonshotAI: Kimi K2 0711 (free)",
      "MoonshotAI: Kimi K2 0711",
      "MoonshotAI: Kimi Dev 72B"
    ],
    "input_pricing": {
      "count": 6,
      "min": 0.29,
      "max": 0.6,
      "mean": 0.44666666666666666,
      "median": 0.445
    },
    "output_pricing": {
      "count": 6,
      "min": 0.6,
      "max": 2.5,
      "mean": 1.8416666666666666,
      "median": 2.15
    },
    "context_lengths": {
      "count": 7,
      "min": 32768,
      "max": 1048576,
      "mean": 304274.28571428574,
      "median": 262144
    }
  },
  "OpenRouter": {
    "vendor": "OpenRouter",
    "total_models": 2,
    "free_models": 1,
    "paid_models": 1,
    "model_names": [
      "Polaris Alpha",
      "Auto Router"
    ],
    "input_pricing": null,
    "output_pricing": null,
    "context_lengths": {
      "count": 2,
      "min": 256000,
      "max": 2000000,
      "mean": 1128000,
      "median": 1128000.0
    }
  },
  "Amazon": {
    "vendor": "Amazon",
    "total_models": 4,
    "free_models": 0,
    "paid_models": 4,
    "model_names": [
      "Amazon: Nova Premier 1.0",
      "Amazon: Nova Lite 1.0",
      "Amazon: Nova Micro 1.0",
      "Amazon: Nova Pro 1.0"
    ],
    "input_pricing": {
      "count": 4,
      "min": 0.035,
      "max": 2.5,
      "mean": 0.84875,
      "median": 0.42999999999999994
    },
    "output_pricing": {
      "count": 4,
      "min": 0.14,
      "max": 12.5,
      "mean": 4.02,
      "median": 1.7199999999999998
    },
    "context_lengths": {
      "count": 4,
      "min": 128000,
      "max": 1000000,
      "mean": 432000,
      "median": 300000.0
    }
  },
  "Perplexity": {
    "vendor": "Perplexity",
    "total_models": 6,
    "free_models": 0,
    "paid_models": 6,
    "model_names": [
      "Perplexity: Sonar Pro Search",
      "Perplexity: Sonar Reasoning Pro",
      "Perplexity: Sonar Pro",
      "Perplexity: Sonar Deep Research",
      "Perplexity: Sonar Reasoning",
      "Perplexity: Sonar"
    ],
    "input_pricing": {
      "count": 6,
      "min": 1.0,
      "max": 3.0,
      "mean": 2.0,
      "median": 2.0
    },
    "output_pricing": {
      "count": 6,
      "min": 1.0,
      "max": 15.0,
      "mean": 8.666666666666666,
      "median": 8.0
    },
    "context_lengths": {
      "count": 6,
      "min": 127000,
      "max": 200000,
      "mean": 151678.66666666666,
      "median": 128000.0
    }
  },
  "Mistral AI": {
    "vendor": "Mistral AI",
    "total_models": 36,
    "free_models": 5,
    "paid_models": 31,
    "model_names": [
      "Mistral: Voxtral Small 24B 2507",
      "Mistral: Mistral Medium 3.1",
      "Mistral: Codestral 2508",
      "Mistral: Devstral Medium",
      "Mistral: Devstral Small 1.1",
      "Mistral: Mistral Small 3.2 24B (free)",
      "Mistral: Mistral Small 3.2 24B",
      "Mistral: Magistral Small 2506",
      "Mistral: Magistral Medium 2506 (thinking)",
      "Mistral: Magistral Medium 2506",
      "Mistral: Devstral Small 2505",
      "Mistral: Mistral Medium 3",
      "Mistral: Mistral Small 3.1 24B (free)",
      "Mistral: Mistral Small 3.1 24B",
      "Mistral: Saba",
      "Mistral: Mistral Small 3 (free)",
      "Mistral: Mistral Small 3",
      "Mistral: Codestral 2501",
      "Mistral Large 2411",
      "Mistral Large 2407",
      "Mistral: Pixtral Large 2411",
      "Mistral: Ministral 3B",
      "Mistral: Ministral 8B",
      "Mistral: Pixtral 12B",
      "Mistral: Mistral Nemo (free)",
      "Mistral: Mistral Nemo",
      "Mistral: Mistral 7B Instruct (free)",
      "Mistral: Mistral 7B Instruct",
      "Mistral: Mistral 7B Instruct v0.3",
      "Mistral: Mixtral 8x22B Instruct",
      "Mistral Large",
      "Mistral Tiny",
      "Mistral Small",
      "Mistral: Mistral 7B Instruct v0.2",
      "Mistral: Mixtral 8x7B Instruct",
      "Mistral: Mistral 7B Instruct v0.1"
    ],
    "input_pricing": {
      "count": 31,
      "min": 0.02,
      "max": 2.0,
      "mean": 0.5969677419354839,
      "median": 0.19999999999999998
    },
    "output_pricing": {
      "count": 31,
      "min": 0.04,
      "max": 6.0,
      "mean": 1.7176774193548388,
      "median": 0.54
    },
    "context_lengths": {
      "count": 36,
      "min": 2824,
      "max": 262144,
      "mean": 91244.66666666667,
      "median": 112000.0
    }
  },
  "OpenAI": {
    "vendor": "OpenAI",
    "total_models": 47,
    "free_models": 1,
    "paid_models": 46,
    "model_names": [
      "OpenAI: gpt-oss-safeguard-20b",
      "OpenAI: GPT-5 Image Mini",
      "OpenAI: GPT-5 Image",
      "OpenAI: o3 Deep Research",
      "OpenAI: o4 Mini Deep Research",
      "OpenAI: GPT-5 Pro",
      "OpenAI: GPT-5 Codex",
      "OpenAI: GPT-4o Audio",
      "OpenAI: GPT-5 Chat",
      "OpenAI: GPT-5",
      "OpenAI: GPT-5 Mini",
      "OpenAI: GPT-5 Nano",
      "OpenAI: gpt-oss-120b",
      "OpenAI: gpt-oss-120b (exacto)",
      "OpenAI: gpt-oss-20b (free)",
      "OpenAI: gpt-oss-20b",
      "OpenAI: o3 Pro",
      "OpenAI: Codex Mini",
      "OpenAI: o4 Mini High",
      "OpenAI: o3",
      "OpenAI: o4 Mini",
      "OpenAI: GPT-4.1",
      "OpenAI: GPT-4.1 Mini",
      "OpenAI: GPT-4.1 Nano",
      "OpenAI: o1-pro",
      "OpenAI: GPT-4o-mini Search Preview",
      "OpenAI: GPT-4o Search Preview",
      "OpenAI: o3 Mini High",
      "OpenAI: o3 Mini",
      "OpenAI: o1",
      "OpenAI: GPT-4o (2024-11-20)",
      "OpenAI: ChatGPT-4o",
      "OpenAI: GPT-4o (2024-08-06)",
      "OpenAI: GPT-4o-mini",
      "OpenAI: GPT-4o-mini (2024-07-18)",
      "OpenAI: GPT-4o (2024-05-13)",
      "OpenAI: GPT-4o",
      "OpenAI: GPT-4o (extended)",
      "OpenAI: GPT-4 Turbo",
      "OpenAI: GPT-4 Turbo Preview",
      "OpenAI: GPT-3.5 Turbo (older v0613)",
      "OpenAI: GPT-4 Turbo (older v1106)",
      "OpenAI: GPT-3.5 Turbo Instruct",
      "OpenAI: GPT-3.5 Turbo 16k",
      "OpenAI: GPT-4 (older v0314)",
      "OpenAI: GPT-4",
      "OpenAI: GPT-3.5 Turbo"
    ],
    "input_pricing": {
      "count": 46,
      "min": 0.03,
      "max": 150.0,
      "mean": 7.915108695652174,
      "median": 2.0
    },
    "output_pricing": {
      "count": 46,
      "min": 0.14,
      "max": 600.0,
      "mean": 28.573478260869564,
      "median": 8.0
    },
    "context_lengths": {
      "count": 47,
      "min": 4095,
      "max": 1047576,
      "mean": 229264.46808510637,
      "median": 131072
    }
  },
  "NVIDIA": {
    "vendor": "NVIDIA",
    "total_models": 7,
    "free_models": 2,
    "paid_models": 5,
    "model_names": [
      "NVIDIA: Nemotron Nano 12B 2 VL (free)",
      "NVIDIA: Nemotron Nano 12B 2 VL",
      "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5",
      "NVIDIA: Nemotron Nano 9B V2 (free)",
      "NVIDIA: Nemotron Nano 9B V2",
      "NVIDIA: Llama 3.1 Nemotron Ultra 253B v1",
      "NVIDIA: Llama 3.1 Nemotron 70B Instruct"
    ],
    "input_pricing": {
      "count": 5,
      "min": 0.04,
      "max": 0.6,
      "mean": 0.308,
      "median": 0.19999999999999998
    },
    "output_pricing": {
      "count": 5,
      "min": 0.16,
      "max": 1.7999999999999998,
      "mean": 0.712,
      "median": 0.6
    },
    "context_lengths": {
      "count": 7,
      "min": 128000,
      "max": 131072,
      "mean": 130194.28571428571,
      "median": 131072
    }
  },
  "Minimax": {
    "vendor": "Minimax",
    "total_models": 4,
    "free_models": 1,
    "paid_models": 3,
    "model_names": [
      "MiniMax: MiniMax M2 (free)",
      "MiniMax: MiniMax M2",
      "MiniMax: MiniMax M1",
      "MiniMax: MiniMax-01"
    ],
    "input_pricing": {
      "count": 3,
      "min": 0.19999999999999998,
      "max": 0.39999999999999997,
      "mean": 0.285,
      "median": 0.255
    },
    "output_pricing": {
      "count": 3,
      "min": 1.02,
      "max": 2.2,
      "mean": 1.4400000000000002,
      "median": 1.1
    },
    "context_lengths": {
      "count": 4,
      "min": 204800,
      "max": 1000192,
      "mean": 602448,
      "median": 602400.0
    }
  },
  "Liquid AI": {
    "vendor": "Liquid AI",
    "total_models": 2,
    "free_models": 0,
    "paid_models": 2,
    "model_names": [
      "LiquidAI/LFM2-8B-A1B",
      "LiquidAI/LFM2-2.6B"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.049999999999999996,
      "max": 0.049999999999999996,
      "mean": 0.049999999999999996,
      "median": 0.049999999999999996
    },
    "output_pricing": {
      "count": 2,
      "min": 0.09999999999999999,
      "max": 0.09999999999999999,
      "mean": 0.09999999999999999,
      "median": 0.09999999999999999
    },
    "context_lengths": {
      "count": 2,
      "min": 32768,
      "max": 32768,
      "mean": 32768,
      "median": 32768.0
    }
  },
  "IBM": {
    "vendor": "IBM",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "IBM: Granite 4.0 Micro"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.017,
      "max": 0.017,
      "mean": 0.017,
      "median": 0.017
    },
    "output_pricing": {
      "count": 1,
      "min": 0.11,
      "max": 0.11,
      "mean": 0.11,
      "median": 0.11
    },
    "context_lengths": {
      "count": 1,
      "min": 131000,
      "max": 131000,
      "mean": 131000,
      "median": 131000
    }
  },
  "DeepCogito": {
    "vendor": "DeepCogito",
    "total_models": 4,
    "free_models": 0,
    "paid_models": 4,
    "model_names": [
      "Deep Cogito: Cogito V2 Preview Llama 405B",
      "Deep Cogito: Cogito V2 Preview Llama 70B",
      "Cogito V2 Preview Llama 109B",
      "Deep Cogito: Cogito V2 Preview Deepseek 671B"
    ],
    "input_pricing": {
      "count": 4,
      "min": 0.18,
      "max": 3.5,
      "mean": 1.4525,
      "median": 1.065
    },
    "output_pricing": {
      "count": 4,
      "min": 0.59,
      "max": 3.5,
      "mean": 1.555,
      "median": 1.065
    },
    "context_lengths": {
      "count": 4,
      "min": 32767,
      "max": 163840,
      "mean": 65535.75,
      "median": 32768.0
    }
  },
  "Anthropic": {
    "vendor": "Anthropic",
    "total_models": 13,
    "free_models": 0,
    "paid_models": 13,
    "model_names": [
      "Anthropic: Claude Haiku 4.5",
      "Anthropic: Claude Sonnet 4.5",
      "Anthropic: Claude Opus 4.1",
      "Anthropic: Claude Opus 4",
      "Anthropic: Claude Sonnet 4",
      "Anthropic: Claude 3.7 Sonnet (thinking)",
      "Anthropic: Claude 3.7 Sonnet",
      "Anthropic: Claude 3.5 Haiku",
      "Anthropic: Claude 3.5 Haiku (2024-10-22)",
      "Anthropic: Claude 3.5 Sonnet",
      "Anthropic: Claude 3.5 Sonnet (2024-06-20)",
      "Anthropic: Claude 3 Haiku",
      "Anthropic: Claude 3 Opus"
    ],
    "input_pricing": {
      "count": 13,
      "min": 0.25,
      "max": 15.0,
      "mean": 5.065384615384615,
      "median": 3.0
    },
    "output_pricing": {
      "count": 13,
      "min": 1.25,
      "max": 75.0,
      "mean": 25.326923076923077,
      "median": 15.0
    },
    "context_lengths": {
      "count": 13,
      "min": 200000,
      "max": 1000000,
      "mean": 323076.92307692306,
      "median": 200000
    }
  },
  "Qwen": {
    "vendor": "Qwen",
    "total_models": 46,
    "free_models": 8,
    "paid_models": 38,
    "model_names": [
      "Qwen: Qwen3 VL 8B Thinking",
      "Qwen: Qwen3 VL 8B Instruct",
      "Qwen: Qwen3 VL 30B A3B Thinking",
      "Qwen: Qwen3 VL 30B A3B Instruct",
      "Qwen: Qwen3 VL 235B A22B Thinking",
      "Qwen: Qwen3 VL 235B A22B Instruct",
      "Qwen: Qwen3 Max",
      "Qwen: Qwen3 Coder Plus",
      "Qwen: Qwen3 Coder Flash",
      "Qwen: Qwen3 Next 80B A3B Thinking",
      "Qwen: Qwen3 Next 80B A3B Instruct",
      "Qwen: Qwen Plus 0728",
      "Qwen: Qwen Plus 0728 (thinking)",
      "Qwen: Qwen3 30B A3B Thinking 2507",
      "Qwen: Qwen3 Coder 30B A3B Instruct",
      "Qwen: Qwen3 30B A3B Instruct 2507",
      "Qwen: Qwen3 235B A22B Thinking 2507",
      "Qwen: Qwen3 Coder 480B A35B (free)",
      "Qwen: Qwen3 Coder 480B A35B",
      "Qwen: Qwen3 Coder 480B A35B (exacto)",
      "Qwen: Qwen3 235B A22B Instruct 2507",
      "Qwen: Qwen3 4B (free)",
      "Qwen: Qwen3 30B A3B (free)",
      "Qwen: Qwen3 30B A3B",
      "Qwen: Qwen3 8B",
      "Qwen: Qwen3 14B (free)",
      "Qwen: Qwen3 14B",
      "Qwen: Qwen3 32B",
      "Qwen: Qwen3 235B A22B (free)",
      "Qwen: Qwen3 235B A22B",
      "Qwen: Qwen2.5 Coder 7B Instruct",
      "Qwen: Qwen2.5 VL 32B Instruct (free)",
      "Qwen: Qwen2.5 VL 32B Instruct",
      "Qwen: QwQ 32B",
      "Qwen: Qwen VL Plus",
      "Qwen: Qwen VL Max",
      "Qwen: Qwen-Turbo",
      "Qwen: Qwen2.5 VL 72B Instruct",
      "Qwen: Qwen-Plus",
      "Qwen: Qwen-Max ",
      "Qwen2.5 Coder 32B Instruct (free)",
      "Qwen2.5 Coder 32B Instruct",
      "Qwen: Qwen2.5 7B Instruct",
      "Qwen2.5 72B Instruct (free)",
      "Qwen2.5 72B Instruct",
      "Qwen: Qwen2.5-VL 7B Instruct"
    ],
    "input_pricing": {
      "count": 38,
      "min": 0.03,
      "max": 1.5999999999999999,
      "mean": 0.2577631578947368,
      "median": 0.15
    },
    "output_pricing": {
      "count": 38,
      "min": 0.09,
      "max": 6.3999999999999995,
      "mean": 1.189421052631579,
      "median": 0.575
    },
    "context_lengths": {
      "count": 46,
      "min": 7500,
      "max": 1000000,
      "mean": 185296.60869565216,
      "median": 128000.0
    }
  },
  "Inclusion AI": {
    "vendor": "Inclusion AI",
    "total_models": 2,
    "free_models": 0,
    "paid_models": 2,
    "model_names": [
      "inclusionAI: Ring 1T",
      "inclusionAI: Ling-1T"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.5700000000000001,
      "max": 0.5700000000000001,
      "mean": 0.5700000000000001,
      "median": 0.5700000000000001
    },
    "output_pricing": {
      "count": 2,
      "min": 2.2800000000000002,
      "max": 2.2800000000000002,
      "mean": 2.2800000000000002,
      "median": 2.2800000000000002
    },
    "context_lengths": {
      "count": 2,
      "min": 131072,
      "max": 131072,
      "mean": 131072,
      "median": 131072.0
    }
  },
  "Baidu": {
    "vendor": "Baidu",
    "total_models": 5,
    "free_models": 0,
    "paid_models": 5,
    "model_names": [
      "Baidu: ERNIE 4.5 21B A3B Thinking",
      "Baidu: ERNIE 4.5 21B A3B",
      "Baidu: ERNIE 4.5 VL 28B A3B",
      "Baidu: ERNIE 4.5 VL 424B A47B ",
      "Baidu: ERNIE 4.5 300B A47B "
    ],
    "input_pricing": {
      "count": 5,
      "min": 0.07,
      "max": 0.42,
      "mean": 0.196,
      "median": 0.14
    },
    "output_pricing": {
      "count": 5,
      "min": 0.28,
      "max": 1.25,
      "mean": 0.6940000000000001,
      "median": 0.56
    },
    "context_lengths": {
      "count": 5,
      "min": 30000,
      "max": 131072,
      "mean": 105414.4,
      "median": 123000
    }
  },
  "Google": {
    "vendor": "Google",
    "total_models": 24,
    "free_models": 6,
    "paid_models": 18,
    "model_names": [
      "Google: Gemini 2.5 Flash Image (Nano Banana)",
      "Google: Gemini 2.5 Flash Preview 09-2025",
      "Google: Gemini 2.5 Flash Lite Preview 09-2025",
      "Google: Gemini 2.5 Flash Image Preview (Nano Banana)",
      "Google: Gemini 2.5 Flash Lite",
      "Google: Gemma 3n 2B (free)",
      "Google: Gemini 2.5 Flash Lite Preview 06-17",
      "Google: Gemini 2.5 Flash",
      "Google: Gemini 2.5 Pro",
      "Google: Gemini 2.5 Pro Preview 06-05",
      "Google: Gemma 3n 4B (free)",
      "Google: Gemma 3n 4B",
      "Google: Gemini 2.5 Pro Preview 05-06",
      "Google: Gemma 3 4B (free)",
      "Google: Gemma 3 4B",
      "Google: Gemma 3 12B (free)",
      "Google: Gemma 3 12B",
      "Google: Gemma 3 27B (free)",
      "Google: Gemma 3 27B",
      "Google: Gemini 2.0 Flash Lite",
      "Google: Gemini 2.0 Flash",
      "Google: Gemini 2.0 Flash Experimental (free)",
      "Google: Gemma 2 27B",
      "Google: Gemma 2 9B"
    ],
    "input_pricing": {
      "count": 18,
      "min": 0.01703012,
      "max": 1.25,
      "mean": 0.3478905622222222,
      "median": 0.09999999999999999
    },
    "output_pricing": {
      "count": 18,
      "min": 0.04,
      "max": 10.0,
      "mean": 2.389341866666667,
      "median": 0.39999999999999997
    },
    "context_lengths": {
      "count": 24,
      "min": 8192,
      "max": 1048576,
      "mean": 509173.3333333333,
      "median": 131072.0
    }
  },
  "Z-AI": {
    "vendor": "Z-AI",
    "total_models": 7,
    "free_models": 1,
    "paid_models": 6,
    "model_names": [
      "Z.AI: GLM 4.6",
      "Z.AI: GLM 4.6 (exacto)",
      "Z.AI: GLM 4.5V",
      "Z.AI: GLM 4.5",
      "Z.AI: GLM 4.5 Air (free)",
      "Z.AI: GLM 4.5 Air",
      "Z.AI: GLM 4 32B "
    ],
    "input_pricing": {
      "count": 6,
      "min": 0.09999999999999999,
      "max": 0.6,
      "mean": 0.3383333333333333,
      "median": 0.375
    },
    "output_pricing": {
      "count": 6,
      "min": 0.09999999999999999,
      "max": 1.9,
      "mean": 1.325,
      "median": 1.65
    },
    "context_lengths": {
      "count": 7,
      "min": 65536,
      "max": 202752,
      "mean": 141750.85714285713,
      "median": 131072
    }
  },
  "DeepSeek": {
    "vendor": "DeepSeek",
    "total_models": 19,
    "free_models": 6,
    "paid_models": 13,
    "model_names": [
      "DeepSeek: DeepSeek V3.2 Exp",
      "DeepSeek: DeepSeek V3.1 Terminus",
      "DeepSeek: DeepSeek V3.1 Terminus (exacto)",
      "DeepSeek: DeepSeek V3.1 (free)",
      "DeepSeek: DeepSeek V3.1",
      "DeepSeek: DeepSeek R1 0528 Qwen3 8B (free)",
      "DeepSeek: DeepSeek R1 0528 Qwen3 8B",
      "DeepSeek: R1 0528 (free)",
      "DeepSeek: R1 0528",
      "DeepSeek: DeepSeek Prover V2",
      "DeepSeek: DeepSeek V3 0324 (free)",
      "DeepSeek: DeepSeek V3 0324",
      "DeepSeek: R1 Distill Qwen 32B",
      "DeepSeek: R1 Distill Qwen 14B",
      "DeepSeek: R1 Distill Llama 70B (free)",
      "DeepSeek: R1 Distill Llama 70B",
      "DeepSeek: R1 (free)",
      "DeepSeek: R1",
      "DeepSeek: DeepSeek V3"
    ],
    "input_pricing": {
      "count": 13,
      "min": 0.02,
      "max": 0.5,
      "mean": 0.24461538461538462,
      "median": 0.27
    },
    "output_pricing": {
      "count": 13,
      "min": 0.09999999999999999,
      "max": 2.1799999999999997,
      "mean": 0.84,
      "median": 0.84
    },
    "context_lengths": {
      "count": 19,
      "min": 8192,
      "max": 163840,
      "mean": 134950.31578947368,
      "median": 163840
    }
  },
  "TheDrummer": {
    "vendor": "TheDrummer",
    "total_models": 5,
    "free_models": 0,
    "paid_models": 5,
    "model_names": [
      "TheDrummer: Cydonia 24B V4.1",
      "TheDrummer: Anubis 70B V1.1",
      "TheDrummer: Skyfall 36B V2",
      "TheDrummer: UnslopNemo 12B",
      "TheDrummer: Rocinante 12B"
    ],
    "input_pricing": {
      "count": 5,
      "min": 0.16999999999999998,
      "max": 0.65,
      "mean": 0.40399999999999997,
      "median": 0.39999999999999997
    },
    "output_pricing": {
      "count": 5,
      "min": 0.39999999999999997,
      "max": 1.0,
      "mean": 0.626,
      "median": 0.5
    },
    "context_lengths": {
      "count": 5,
      "min": 32768,
      "max": 131072,
      "mean": 72089.6,
      "median": 32768
    }
  },
  "Relace": {
    "vendor": "Relace",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "Relace: Relace Apply 3"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.85,
      "max": 0.85,
      "mean": 0.85,
      "median": 0.85
    },
    "output_pricing": {
      "count": 1,
      "min": 1.25,
      "max": 1.25,
      "mean": 1.25,
      "median": 1.25
    },
    "context_lengths": {
      "count": 1,
      "min": 256000,
      "max": 256000,
      "mean": 256000,
      "median": 256000
    }
  },
  "xAI": {
    "vendor": "xAI",
    "total_models": 7,
    "free_models": 0,
    "paid_models": 7,
    "model_names": [
      "xAI: Grok 4 Fast",
      "xAI: Grok Code Fast 1",
      "xAI: Grok 4",
      "xAI: Grok 3 Mini",
      "xAI: Grok 3",
      "xAI: Grok 3 Mini Beta",
      "xAI: Grok 3 Beta"
    ],
    "input_pricing": {
      "count": 7,
      "min": 0.19999999999999998,
      "max": 3.0,
      "mean": 1.4285714285714286,
      "median": 0.3
    },
    "output_pricing": {
      "count": 7,
      "min": 0.5,
      "max": 15.0,
      "mean": 6.857142857142857,
      "median": 1.5
    },
    "context_lengths": {
      "count": 7,
      "min": 131072,
      "max": 2000000,
      "mean": 433755.4285714286,
      "median": 131072
    }
  },
  "Alibaba": {
    "vendor": "Alibaba",
    "total_models": 2,
    "free_models": 1,
    "paid_models": 1,
    "model_names": [
      "Tongyi DeepResearch 30B A3B (free)",
      "Tongyi DeepResearch 30B A3B"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.09,
      "max": 0.09,
      "mean": 0.09,
      "median": 0.09
    },
    "output_pricing": {
      "count": 1,
      "min": 0.39999999999999997,
      "max": 0.39999999999999997,
      "mean": 0.39999999999999997,
      "median": 0.39999999999999997
    },
    "context_lengths": {
      "count": 2,
      "min": 131072,
      "max": 131072,
      "mean": 131072,
      "median": 131072.0
    }
  },
  "Arcee AI": {
    "vendor": "Arcee AI",
    "total_models": 5,
    "free_models": 0,
    "paid_models": 5,
    "model_names": [
      "Arcee AI: AFM 4.5B",
      "Arcee AI: Spotlight",
      "Arcee AI: Maestro Reasoning",
      "Arcee AI: Virtuoso Large",
      "Arcee AI: Coder Large"
    ],
    "input_pricing": {
      "count": 5,
      "min": 0.048,
      "max": 0.8999999999999999,
      "mean": 0.47559999999999997,
      "median": 0.5
    },
    "output_pricing": {
      "count": 5,
      "min": 0.15,
      "max": 3.3000000000000003,
      "mean": 1.1260000000000001,
      "median": 0.7999999999999999
    },
    "context_lengths": {
      "count": 5,
      "min": 32768,
      "max": 131072,
      "mean": 98304,
      "median": 131072
    }
  },
  "OpenGVLab": {
    "vendor": "OpenGVLab",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "OpenGVLab: InternVL3 78B"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.07,
      "max": 0.07,
      "mean": 0.07,
      "median": 0.07
    },
    "output_pricing": {
      "count": 1,
      "min": 0.26,
      "max": 0.26,
      "mean": 0.26,
      "median": 0.26
    },
    "context_lengths": {
      "count": 1,
      "min": 32768,
      "max": 32768,
      "mean": 32768,
      "median": 32768
    }
  },
  "Meituan": {
    "vendor": "Meituan",
    "total_models": 2,
    "free_models": 1,
    "paid_models": 1,
    "model_names": [
      "Meituan: LongCat Flash Chat (free)",
      "Meituan: LongCat Flash Chat"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.15,
      "max": 0.15,
      "mean": 0.15,
      "median": 0.15
    },
    "output_pricing": {
      "count": 1,
      "min": 0.75,
      "max": 0.75,
      "mean": 0.75,
      "median": 0.75
    },
    "context_lengths": {
      "count": 2,
      "min": 131072,
      "max": 131072,
      "mean": 131072,
      "median": 131072.0
    }
  },
  "Stepfun AI": {
    "vendor": "Stepfun AI",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "StepFun: Step3"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.5700000000000001,
      "max": 0.5700000000000001,
      "mean": 0.5700000000000001,
      "median": 0.5700000000000001
    },
    "output_pricing": {
      "count": 1,
      "min": 1.42,
      "max": 1.42,
      "mean": 1.42,
      "median": 1.42
    },
    "context_lengths": {
      "count": 1,
      "min": 65536,
      "max": 65536,
      "mean": 65536,
      "median": 65536
    }
  },
  "Nous Research": {
    "vendor": "Nous Research",
    "total_models": 7,
    "free_models": 1,
    "paid_models": 6,
    "model_names": [
      "Nous: Hermes 4 70B",
      "Nous: Hermes 4 405B",
      "Nous: DeepHermes 3 Mistral 24B Preview",
      "Nous: Hermes 3 70B Instruct",
      "Nous: Hermes 3 405B Instruct (free)",
      "Nous: Hermes 3 405B Instruct",
      "NousResearch: Hermes 2 Pro - Llama-3 8B"
    ],
    "input_pricing": {
      "count": 6,
      "min": 0.024999999999999998,
      "max": 1.0,
      "mean": 0.31416666666666665,
      "median": 0.22499999999999998
    },
    "output_pricing": {
      "count": 6,
      "min": 0.08,
      "max": 1.2,
      "mean": 0.5916666666666667,
      "median": 0.485
    },
    "context_lengths": {
      "count": 7,
      "min": 32768,
      "max": 131072,
      "mean": 93622.85714285714,
      "median": 131072
    }
  },
  "AI21 Labs": {
    "vendor": "AI21 Labs",
    "total_models": 2,
    "free_models": 0,
    "paid_models": 2,
    "model_names": [
      "AI21: Jamba Mini 1.7",
      "AI21: Jamba Large 1.7"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.19999999999999998,
      "max": 2.0,
      "mean": 1.1,
      "median": 1.1
    },
    "output_pricing": {
      "count": 2,
      "min": 0.39999999999999997,
      "max": 8.0,
      "mean": 4.2,
      "median": 4.2
    },
    "context_lengths": {
      "count": 2,
      "min": 256000,
      "max": 256000,
      "mean": 256000,
      "median": 256000.0
    }
  },
  "ByteDance": {
    "vendor": "ByteDance",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "ByteDance: UI-TARS 7B "
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.09999999999999999,
      "max": 0.09999999999999999,
      "mean": 0.09999999999999999,
      "median": 0.09999999999999999
    },
    "output_pricing": {
      "count": 1,
      "min": 0.19999999999999998,
      "max": 0.19999999999999998,
      "mean": 0.19999999999999998,
      "median": 0.19999999999999998
    },
    "context_lengths": {
      "count": 1,
      "min": 128000,
      "max": 128000,
      "mean": 128000,
      "median": 128000
    }
  },
  "Switchpoint": {
    "vendor": "Switchpoint",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "Switchpoint Router"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.85,
      "max": 0.85,
      "mean": 0.85,
      "median": 0.85
    },
    "output_pricing": {
      "count": 1,
      "min": 3.4,
      "max": 3.4,
      "mean": 3.4,
      "median": 3.4
    },
    "context_lengths": {
      "count": 1,
      "min": 131072,
      "max": 131072,
      "mean": 131072,
      "median": 131072
    }
  },
  "THUDM": {
    "vendor": "THUDM",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "THUDM: GLM 4.1V 9B Thinking"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.035,
      "max": 0.035,
      "mean": 0.035,
      "median": 0.035
    },
    "output_pricing": {
      "count": 1,
      "min": 0.13799999999999998,
      "max": 0.13799999999999998,
      "mean": 0.13799999999999998,
      "median": 0.13799999999999998
    },
    "context_lengths": {
      "count": 1,
      "min": 65536,
      "max": 65536,
      "mean": 65536,
      "median": 65536
    }
  },
  "Cognitive Computations": {
    "vendor": "Cognitive Computations",
    "total_models": 1,
    "free_models": 1,
    "paid_models": 0,
    "model_names": [
      "Venice: Uncensored (free)"
    ],
    "input_pricing": null,
    "output_pricing": null,
    "context_lengths": {
      "count": 1,
      "min": 32768,
      "max": 32768,
      "mean": 32768,
      "median": 32768
    }
  },
  "Tencent": {
    "vendor": "Tencent",
    "total_models": 1,
    "free_models": 0,
    "paid_models": 1,
    "model_names": [
      "Tencent: Hunyuan A13B Instruct"
    ],
    "input_pricing": {
      "count": 1,
      "min": 0.14,
      "max": 0.14,
      "mean": 0.14,
      "median": 0.14
    },
    "output_pricing": {
      "count": 1,
      "min": 0.5700000000000001,
      "max": 0.5700000000000001,
      "mean": 0.5700000000000001,
      "median": 0.5700000000000001
    },
    "context_lengths": {
      "count": 1,
      "min": 131072,
      "max": 131072,
      "mean": 131072,
      "median": 131072
    }
  },
  "TNG Technology": {
    "vendor": "TNG Technology",
    "total_models": 4,
    "free_models": 2,
    "paid_models": 2,
    "model_names": [
      "TNG: DeepSeek R1T2 Chimera (free)",
      "TNG: DeepSeek R1T2 Chimera",
      "TNG: DeepSeek R1T Chimera (free)",
      "TNG: DeepSeek R1T Chimera"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.3,
      "max": 0.3,
      "mean": 0.3,
      "median": 0.3
    },
    "output_pricing": {
      "count": 2,
      "min": 1.2,
      "max": 1.2,
      "mean": 1.2,
      "median": 1.2
    },
    "context_lengths": {
      "count": 4,
      "min": 163840,
      "max": 163840,
      "mean": 163840,
      "median": 163840.0
    }
  },
  "Morph": {
    "vendor": "Morph",
    "total_models": 2,
    "free_models": 0,
    "paid_models": 2,
    "model_names": [
      "Morph: Morph V3 Large",
      "Morph: Morph V3 Fast"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.7999999999999999,
      "max": 0.8999999999999999,
      "mean": 0.8499999999999999,
      "median": 0.8499999999999999
    },
    "output_pricing": {
      "count": 2,
      "min": 1.2,
      "max": 1.9,
      "mean": 1.5499999999999998,
      "median": 1.5499999999999998
    },
    "context_lengths": {
      "count": 2,
      "min": 81920,
      "max": 262144,
      "mean": 172032,
      "median": 172032.0
    }
  },
  "Inception": {
    "vendor": "Inception",
    "total_models": 2,
    "free_models": 0,
    "paid_models": 2,
    "model_names": [
      "Inception: Mercury",
      "Inception: Mercury Coder"
    ],
    "input_pricing": {
      "count": 2,
      "min": 0.25,
      "max": 0.25,
      "mean": 0.25,
      "median": 0.25
    },
    "output_pricing": {
      "count": 2,
      "min": 1.0,
      "max": 1.0,
      "mean": 1.0,
      "median": 1.0
    },
    "context_lengths": {
      "count": 2,
      "min": 128000,
      "max": 128000,
      "mean": 128000,
      "median": 128000.0
    }
  },
  "Meta": {
    "vendor": "Meta",
    "total_models": 21,
    "free_models": 5,
    "paid_models": 16,
    "model_names": [
      "Meta: Llama 3.3 8B Instruct (free)",
      "Meta: Llama Guard 4 12B",
      "Meta: Llama 4 Maverick (free)",
      "Meta: Llama 4 Maverick",
      "Meta: Llama 4 Scout (free)",
      "Meta: Llama 4 Scout",
      "Llama Guard 3 8B",
      "Meta: Llama 3.3 70B Instruct (free)",
      "Meta: Llama 3.3 70B Instruct",
      "Meta: Llama 3.2 1B Instruct",
      "Meta: Llama 3.2 3B Instruct (free)",
      "Meta: Llama 3.2 3B Instruct",
      "Meta: Llama 3.2 11B Vision Instruct",
      "Meta: Llama 3.2 90B Vision Instruct",
      "Meta: Llama 3.1 405B (base)",
      "Meta: Llama 3.1 8B Instruct",
      "Meta: Llama 3.1 405B Instruct",
      "Meta: Llama 3.1 70B Instruct",
      "Meta: LlamaGuard 2 8B",
      "Meta: Llama 3 70B Instruct",
      "Meta: Llama 3 8B Instruct"
    ],
    "input_pricing": {
      "count": 16,
      "min": 0.005,
      "max": 4.0,
      "mean": 0.420875,
      "median": 0.14
    },
    "output_pricing": {
      "count": 16,
      "min": 0.01,
      "max": 4.0,
      "mean": 0.49306249999999996,
      "median": 0.25
    },
    "context_lengths": {
      "count": 21,
      "min": 8192,
      "max": 1048576,
      "mean": 153648.7619047619,
      "median": 131072
    }
  },
  "Microsoft": {
    "vendor": "Microsoft",
    "total_models": 9,
    "free_models": 1,
    "paid_models": 8,
    "model_names": [
      "Microsoft: Phi 4 Reasoning Plus",
      "Microsoft: MAI DS R1 (free)",
      "Microsoft: MAI DS R1",
      "Microsoft: Phi 4 Multimodal Instruct",
      "Microsoft: Phi 4",
      "Microsoft: Phi-3.5 Mini 128K Instruct",
      "Microsoft: Phi-3 Mini 128K Instruct",
      "Microsoft: Phi-3 Medium 128K Instruct",
      "WizardLM-2 8x22B"
    ],
    "input_pricing": {
      "count": 8,
      "min": 0.049999999999999996,
      "max": 1.0,
      "mean": 0.27,
      "median": 0.09999999999999999
    },
    "output_pricing": {
      "count": 8,
      "min": 0.09999999999999999,
      "max": 1.2,
      "mean": 0.43374999999999997,
| "median": 0.245 | |
| }, | |
| "context_lengths": { | |
| "count": 9, | |
| "min": 16384, | |
| "max": 163840, | |
| "mean": 106382.22222222222, | |
| "median": 128000 | |
| } | |
| }, | |
| "EleutherAI": { | |
| "vendor": "EleutherAI", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "EleutherAI: Llemma 7b" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.7999999999999999, | |
| "max": 0.7999999999999999, | |
| "mean": 0.7999999999999999, | |
| "median": 0.7999999999999999 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 1.2, | |
| "max": 1.2, | |
| "mean": 1.2, | |
| "median": 1.2 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 4096, | |
| "max": 4096, | |
| "mean": 4096, | |
| "median": 4096 | |
| } | |
| }, | |
| "AlfredPros": { | |
| "vendor": "AlfredPros", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "AlfredPros: CodeLLaMa 7B Instruct Solidity" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.7999999999999999, | |
| "max": 0.7999999999999999, | |
| "mean": 0.7999999999999999, | |
| "median": 0.7999999999999999 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 1.2, | |
| "max": 1.2, | |
| "mean": 1.2, | |
| "median": 1.2 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 4096, | |
| "max": 4096, | |
| "mean": 4096, | |
| "median": 4096 | |
| } | |
| }, | |
| "Arli AI": { | |
| "vendor": "Arli AI", | |
| "total_models": 2, | |
| "free_models": 1, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "ArliAI: QwQ 32B RpR v1 (free)", | |
| "ArliAI: QwQ 32B RpR v1" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.03, | |
| "max": 0.03, | |
| "mean": 0.03, | |
| "median": 0.03 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 0.11, | |
| "max": 0.11, | |
| "mean": 0.11, | |
| "median": 0.11 | |
| }, | |
| "context_lengths": { | |
| "count": 2, | |
| "min": 32768, | |
| "max": 32768, | |
| "mean": 32768, | |
| "median": 32768.0 | |
| } | |
| }, | |
| "Agentica": { | |
| "vendor": "Agentica", | |
| "total_models": 2, | |
| "free_models": 1, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "Agentica: Deepcoder 14B Preview (free)", | |
| "Agentica: Deepcoder 14B Preview" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.015, | |
| "max": 0.015, | |
| "mean": 0.015, | |
| "median": 0.015 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 0.015, | |
| "max": 0.015, | |
| "mean": 0.015, | |
| "median": 0.015 | |
| }, | |
| "context_lengths": { | |
| "count": 2, | |
| "min": 96000, | |
| "max": 96000, | |
| "mean": 96000, | |
| "median": 96000.0 | |
| } | |
| }, | |
| "Allen Institute for AI": { | |
| "vendor": "Allen Institute for AI", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "AllenAI: Olmo 2 32B Instruct" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.19999999999999998, | |
| "max": 0.19999999999999998, | |
| "mean": 0.19999999999999998, | |
| "median": 0.19999999999999998 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 0.35, | |
| "max": 0.35, | |
| "mean": 0.35, | |
| "median": 0.35 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 4096, | |
| "max": 4096, | |
| "mean": 4096, | |
| "median": 4096 | |
| } | |
| }, | |
| "Cohere": { | |
| "vendor": "Cohere", | |
| "total_models": 4, | |
| "free_models": 0, | |
| "paid_models": 4, | |
| "model_names": [ | |
| "Cohere: Command A", | |
| "Cohere: Command R7B (12-2024)", | |
| "Cohere: Command R+ (08-2024)", | |
| "Cohere: Command R (08-2024)" | |
| ], | |
| "input_pricing": { | |
| "count": 4, | |
| "min": 0.0375, | |
| "max": 2.5, | |
| "mean": 1.296875, | |
| "median": 1.325 | |
| }, | |
| "output_pricing": { | |
| "count": 4, | |
| "min": 0.15, | |
| "max": 10.0, | |
| "mean": 5.1875, | |
| "median": 5.3 | |
| }, | |
| "context_lengths": { | |
| "count": 4, | |
| "min": 128000, | |
| "max": 256000, | |
| "mean": 160000, | |
| "median": 128000.0 | |
| } | |
| }, | |
| "Aion Labs": { | |
| "vendor": "Aion Labs", | |
| "total_models": 3, | |
| "free_models": 0, | |
| "paid_models": 3, | |
| "model_names": [ | |
| "AionLabs: Aion-1.0", | |
| "AionLabs: Aion-1.0-Mini", | |
| "AionLabs: Aion-RP 1.0 (8B)" | |
| ], | |
| "input_pricing": { | |
| "count": 3, | |
| "min": 0.19999999999999998, | |
| "max": 4.0, | |
| "mean": 1.6333333333333333, | |
| "median": 0.7 | |
| }, | |
| "output_pricing": { | |
| "count": 3, | |
| "min": 0.19999999999999998, | |
| "max": 8.0, | |
| "mean": 3.2, | |
| "median": 1.4 | |
| }, | |
| "context_lengths": { | |
| "count": 3, | |
| "min": 32768, | |
| "max": 131072, | |
| "mean": 98304, | |
| "median": 131072 | |
| } | |
| }, | |
| "Sao10k": { | |
| "vendor": "Sao10k", | |
| "total_models": 5, | |
| "free_models": 0, | |
| "paid_models": 5, | |
| "model_names": [ | |
| "Sao10K: Llama 3.1 70B Hanami x1", | |
| "Sao10K: Llama 3.3 Euryale 70B", | |
| "Sao10K: Llama 3.1 Euryale 70B v2.2", | |
| "Sao10K: Llama 3 8B Lunaris", | |
| "Sao10k: Llama 3 Euryale 70B v2.1" | |
| ], | |
| "input_pricing": { | |
| "count": 5, | |
| "min": 0.04, | |
| "max": 3.0, | |
| "mean": 1.164, | |
| "median": 0.65 | |
| }, | |
| "output_pricing": { | |
| "count": 5, | |
| "min": 0.049999999999999996, | |
| "max": 3.0, | |
| "mean": 1.206, | |
| "median": 0.75 | |
| }, | |
| "context_lengths": { | |
| "count": 5, | |
| "min": 8192, | |
| "max": 131072, | |
| "mean": 39244.8, | |
| "median": 16000 | |
| } | |
| }, | |
| "Raifle": { | |
| "vendor": "Raifle", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "SorcererLM 8x22B" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 4.5, | |
| "max": 4.5, | |
| "mean": 4.5, | |
| "median": 4.5 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 4.5, | |
| "max": 4.5, | |
| "mean": 4.5, | |
| "median": 4.5 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 16000, | |
| "max": 16000, | |
| "mean": 16000, | |
| "median": 16000 | |
| } | |
| }, | |
| "Anthracite": { | |
| "vendor": "Anthracite", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "Magnum v4 72B" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 3.0, | |
| "max": 3.0, | |
| "mean": 3.0, | |
| "median": 3.0 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 5.0, | |
| "max": 5.0, | |
| "mean": 5.0, | |
| "median": 5.0 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 16384, | |
| "max": 16384, | |
| "mean": 16384, | |
| "median": 16384 | |
| } | |
| }, | |
| "Inflection AI": { | |
| "vendor": "Inflection AI", | |
| "total_models": 2, | |
| "free_models": 0, | |
| "paid_models": 2, | |
| "model_names": [ | |
| "Inflection: Inflection 3 Productivity", | |
| "Inflection: Inflection 3 Pi" | |
| ], | |
| "input_pricing": { | |
| "count": 2, | |
| "min": 2.5, | |
| "max": 2.5, | |
| "mean": 2.5, | |
| "median": 2.5 | |
| }, | |
| "output_pricing": { | |
| "count": 2, | |
| "min": 10.0, | |
| "max": 10.0, | |
| "mean": 10.0, | |
| "median": 10.0 | |
| }, | |
| "context_lengths": { | |
| "count": 2, | |
| "min": 8000, | |
| "max": 8000, | |
| "mean": 8000, | |
| "median": 8000.0 | |
| } | |
| }, | |
| "Neversleep": { | |
| "vendor": "Neversleep", | |
| "total_models": 2, | |
| "free_models": 0, | |
| "paid_models": 2, | |
| "model_names": [ | |
| "NeverSleep: Lumimaid v0.2 8B", | |
| "Noromaid 20B" | |
| ], | |
| "input_pricing": { | |
| "count": 2, | |
| "min": 0.09, | |
| "max": 1.0, | |
| "mean": 0.545, | |
| "median": 0.545 | |
| }, | |
| "output_pricing": { | |
| "count": 2, | |
| "min": 0.6, | |
| "max": 1.75, | |
| "mean": 1.175, | |
| "median": 1.175 | |
| }, | |
| "context_lengths": { | |
| "count": 2, | |
| "min": 4096, | |
| "max": 32768, | |
| "mean": 18432, | |
| "median": 18432.0 | |
| } | |
| }, | |
| "Alpindale": { | |
| "vendor": "Alpindale", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "Goliath 120B" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 4.0, | |
| "max": 4.0, | |
| "mean": 4.0, | |
| "median": 4.0 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 5.5, | |
| "max": 5.5, | |
| "mean": 5.5, | |
| "median": 5.5 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 6144, | |
| "max": 6144, | |
| "mean": 6144, | |
| "median": 6144 | |
| } | |
| }, | |
| "Mancer": { | |
| "vendor": "Mancer", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "Mancer: Weaver (alpha)" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 1.125, | |
| "max": 1.125, | |
| "mean": 1.125, | |
| "median": 1.125 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 1.125, | |
| "max": 1.125, | |
| "mean": 1.125, | |
| "median": 1.125 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 8000, | |
| "max": 8000, | |
| "mean": 8000, | |
| "median": 8000 | |
| } | |
| }, | |
| "Undi95": { | |
| "vendor": "Undi95", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "ReMM SLERP 13B" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.44999999999999996, | |
| "max": 0.44999999999999996, | |
| "mean": 0.44999999999999996, | |
| "median": 0.44999999999999996 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 0.65, | |
| "max": 0.65, | |
| "mean": 0.65, | |
| "median": 0.65 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 6144, | |
| "max": 6144, | |
| "mean": 6144, | |
| "median": 6144 | |
| } | |
| }, | |
| "Gryphe": { | |
| "vendor": "Gryphe", | |
| "total_models": 1, | |
| "free_models": 0, | |
| "paid_models": 1, | |
| "model_names": [ | |
| "MythoMax 13B" | |
| ], | |
| "input_pricing": { | |
| "count": 1, | |
| "min": 0.06, | |
| "max": 0.06, | |
| "mean": 0.06, | |
| "median": 0.06 | |
| }, | |
| "output_pricing": { | |
| "count": 1, | |
| "min": 0.06, | |
| "max": 0.06, | |
| "mean": 0.06, | |
| "median": 0.06 | |
| }, | |
| "context_lengths": { | |
| "count": 1, | |
| "min": 4096, | |
| "max": 4096, | |
| "mean": 4096, | |
| "median": 4096 | |
| } | |
| } | |
| } |