zhimin-z committed on
Commit
ba6139e
·
1 Parent(s): c035c27
Files changed (3) hide show
  1. .gitignore +1 -0
  2. app.py +30 -36
  3. model_metadata.jsonl +0 -481
.gitignore CHANGED
@@ -3,4 +3,5 @@
3
  *.ipynb
4
  *.pyc
5
  .claude
 
6
  .omc
 
3
  *.ipynb
4
  *.pyc
5
  .claude
6
+ .vscode
7
  .omc
app.py CHANGED
@@ -35,6 +35,7 @@ LEADERBOARD_REPO = "SWE-Arena/leaderboard_data"
35
  VOTE_REPO = "SWE-Arena/vote_data"
36
  CONVERSATION_REPO = "SWE-Arena/conversation_data"
37
  LEADERBOARD_FILE = "chatbot_arena"
 
38
 
39
  # Timeout in seconds for model responses
40
  TIMEOUT = 90
@@ -46,28 +47,25 @@ LEADERBOARD_UPDATE_TIME_FRAME_DAYS = 365
46
  SHOW_HINT_STRING = True # Set to False to hide the hint string altogether
47
  HINT_STRING = "Once signed in, your votes will be recorded securely."
48
 
49
- # Load model metadata
50
- model_metadata = pd.read_json("model_metadata.jsonl", lines=True)
51
-
52
- # Create a dictionary mapping model names to their context lengths
53
- model_context_window = model_metadata.set_index("model_name")[
54
- "context_window"
55
- ].to_dict()
56
-
57
- # Create a dictionary mapping model names to their model IDs
58
- model_name_to_id = model_metadata.set_index("model_name")[
59
- "model_id"
60
- ].to_dict()
61
-
62
- # Create a dictionary mapping model names to their organizations
63
- # model_name format is "{Organization}: {Model}", so we split on ": "
64
- model_organization = {
65
- model_name: model_name.split(": ")[0]
66
- for model_name in model_metadata["model_name"]
67
- }
68
-
69
- # Get the list of available models
70
- available_models = model_metadata["model_name"].tolist()
71
 
72
 
73
  # ---------------------------------------------------------------------------
@@ -996,12 +994,13 @@ def is_file_within_time_frame(file_path, days):
996
  return False
997
 
998
 
999
- def load_content_from_hf(repo_name):
1000
  """
1001
  Read feedback content from a Hugging Face repository within the last LEADERBOARD_UPDATE_TIME_FRAME_DAYS days.
1002
 
1003
  Args:
1004
  repo_name (str): Hugging Face repository name.
 
1005
 
1006
  Returns:
1007
  list: Aggregated feedback data read from the repository.
@@ -1009,8 +1008,10 @@ def load_content_from_hf(repo_name):
1009
  data = []
1010
  try:
1011
  api = HfApi()
1012
- # List all files in the repository
1013
  for file in api.list_repo_files(repo_id=repo_name, repo_type="dataset"):
 
 
1014
  # Filter files by last LEADERBOARD_UPDATE_TIME_FRAME_DAYS days
1015
  if not is_file_within_time_frame(file, LEADERBOARD_UPDATE_TIME_FRAME_DAYS):
1016
  continue
@@ -1058,7 +1059,7 @@ def get_leaderboard_data(vote_entry=None, use_cache=True):
1058
  print(f"No cached leaderboard found, computing from votes...")
1059
 
1060
  # Load feedback data from the Hugging Face repository
1061
- data = load_content_from_hf(VOTE_REPO)
1062
  vote_df = pd.DataFrame(data)
1063
 
1064
  # Concatenate the new feedback with the existing leaderboard data
@@ -1082,7 +1083,7 @@ def get_leaderboard_data(vote_entry=None, use_cache=True):
1082
  )
1083
 
1084
  # Load conversation data from the Hugging Face repository
1085
- conversation_data = load_content_from_hf(CONVERSATION_REPO)
1086
  conversation_df = pd.DataFrame(conversation_data)
1087
 
1088
  # Merge vote data with conversation data
@@ -1454,7 +1455,7 @@ with gr.Blocks(title="SWE-Chatbot-Arena", theme=gr.themes.Soft()) as app:
1454
  # NEW: Add a textbox for the repository URL above the user prompt
1455
  repo_url = gr.Textbox(
1456
  show_label=False,
1457
- placeholder="Optional: Enter the URL of a repository (GitHub, GitLab, Hugging Face), issue, commit, or pull request.",
1458
  lines=1,
1459
  interactive=False,
1460
  )
@@ -2061,9 +2062,7 @@ with gr.Blocks(title="SWE-Chatbot-Arena", theme=gr.themes.Soft()) as app:
2061
  file_name = f"{LEADERBOARD_FILE}/{datetime.now().strftime('%Y%m%d_%H%M%S')}"
2062
 
2063
  # Save feedback back to the Hugging Face dataset
2064
- save_content_to_hf(
2065
- vote_entry, VOTE_REPO, file_name, token
2066
- )
2067
 
2068
  conversation_state["right_chat"][0]["content"] = conversation_state[
2069
  "right_chat"
@@ -2073,12 +2072,7 @@ with gr.Blocks(title="SWE-Chatbot-Arena", theme=gr.themes.Soft()) as app:
2073
  ][0]["content"].split("\n\nInquiry: ")[-1]
2074
 
2075
  # Save conversations back to the Hugging Face dataset
2076
- save_content_to_hf(
2077
- conversation_state,
2078
- CONVERSATION_REPO,
2079
- file_name,
2080
- token,
2081
- )
2082
 
2083
  # Clear state
2084
  models_state.clear()
 
35
  VOTE_REPO = "SWE-Arena/vote_data"
36
  CONVERSATION_REPO = "SWE-Arena/conversation_data"
37
  LEADERBOARD_FILE = "chatbot_arena"
38
+ MODEL_REPO = "SWE-Arena/model_data"
39
 
40
  # Timeout in seconds for model responses
41
  TIMEOUT = 90
 
47
  SHOW_HINT_STRING = True # Set to False to hide the hint string altogether
48
  HINT_STRING = "Once signed in, your votes will be recorded securely."
49
 
50
+ # Load model metadata from Hugging Face
51
+ model_context_window = {}
52
+ model_name_to_id = {}
53
+ model_organization = {}
54
+ available_models = []
55
+
56
+ _api = HfApi()
57
+ for _file in _api.list_repo_files(repo_id=MODEL_REPO, repo_type="dataset"):
58
+ if not _file.endswith(".json"):
59
+ continue
60
+ _local_path = hf_hub_download(repo_id=MODEL_REPO, filename=_file, repo_type="dataset")
61
+ with open(_local_path, "r") as f:
62
+ _record = json.load(f)
63
+ # model_name is derived from the filename (without .json extension)
64
+ _model_name = _file.rsplit("/", 1)[-1].replace(".json", "")
65
+ available_models.append(_model_name)
66
+ model_context_window[_model_name] = _record["context_window"]
67
+ model_name_to_id[_model_name] = _record["model_id"]
68
+ model_organization[_model_name] = _model_name.split(": ")[0]
 
 
 
69
 
70
 
71
  # ---------------------------------------------------------------------------
 
994
  return False
995
 
996
 
997
+ def load_content_from_hf(repo_name, file_name):
998
  """
999
  Read feedback content from a Hugging Face repository within the last LEADERBOARD_UPDATE_TIME_FRAME_DAYS days.
1000
 
1001
  Args:
1002
  repo_name (str): Hugging Face repository name.
1003
+ file_name (str): Only load files under this prefix directory.
1004
 
1005
  Returns:
1006
  list: Aggregated feedback data read from the repository.
 
1008
  data = []
1009
  try:
1010
  api = HfApi()
1011
+ # List all files in the repository, only under the file_name
1012
  for file in api.list_repo_files(repo_id=repo_name, repo_type="dataset"):
1013
+ if not file.startswith(f"{file_name}/"):
1014
+ continue
1015
  # Filter files by last LEADERBOARD_UPDATE_TIME_FRAME_DAYS days
1016
  if not is_file_within_time_frame(file, LEADERBOARD_UPDATE_TIME_FRAME_DAYS):
1017
  continue
 
1059
  print(f"No cached leaderboard found, computing from votes...")
1060
 
1061
  # Load feedback data from the Hugging Face repository
1062
+ data = load_content_from_hf(VOTE_REPO, LEADERBOARD_FILE)
1063
  vote_df = pd.DataFrame(data)
1064
 
1065
  # Concatenate the new feedback with the existing leaderboard data
 
1083
  )
1084
 
1085
  # Load conversation data from the Hugging Face repository
1086
+ conversation_data = load_content_from_hf(CONVERSATION_REPO, LEADERBOARD_FILE)
1087
  conversation_df = pd.DataFrame(conversation_data)
1088
 
1089
  # Merge vote data with conversation data
 
1455
  # NEW: Add a textbox for the repository URL above the user prompt
1456
  repo_url = gr.Textbox(
1457
  show_label=False,
1458
+ placeholder="Optional: Enter any GitHub, GitLab, or Hugging Face URL.",
1459
  lines=1,
1460
  interactive=False,
1461
  )
 
2062
  file_name = f"{LEADERBOARD_FILE}/{datetime.now().strftime('%Y%m%d_%H%M%S')}"
2063
 
2064
  # Save feedback back to the Hugging Face dataset
2065
+ save_content_to_hf(vote_entry, VOTE_REPO, file_name, token)
 
 
2066
 
2067
  conversation_state["right_chat"][0]["content"] = conversation_state[
2068
  "right_chat"
 
2072
  ][0]["content"].split("\n\nInquiry: ")[-1]
2073
 
2074
  # Save conversations back to the Hugging Face dataset
2075
+ save_content_to_hf(conversation_state, CONVERSATION_REPO, file_name, token)
 
 
 
 
 
2076
 
2077
  # Clear state
2078
  models_state.clear()
model_metadata.jsonl DELETED
@@ -1,481 +0,0 @@
1
- {"model_name": "Z.AI: GLM 4.7 Flash", "model_id": "z-ai/glm-4.7-flash", "context_window": 200000}
2
- {"model_name": "OpenAI: GPT-5.2-Codex", "model_id": "openai/gpt-5.2-codex", "context_window": 400000}
3
- {"model_name": "AllenAI: Olmo 3.1 32B Instruct", "model_id": "allenai/olmo-3.1-32b-instruct", "context_window": 65536}
4
- {"model_name": "ByteDance Seed: Seed 1.6 Flash", "model_id": "bytedance-seed/seed-1.6-flash", "context_window": 262144}
5
- {"model_name": "ByteDance Seed: Seed 1.6", "model_id": "bytedance-seed/seed-1.6", "context_window": 262144}
6
- {"model_name": "MiniMax: MiniMax M2.1", "model_id": "minimax/minimax-m2.1", "context_window": 196608}
7
- {"model_name": "Z.AI: GLM 4.7", "model_id": "z-ai/glm-4.7", "context_window": 202752}
8
- {"model_name": "Google: Gemini 3 Flash Preview", "model_id": "google/gemini-3-flash-preview", "context_window": 1048576}
9
- {"model_name": "Mistral: Mistral Small Creative", "model_id": "mistralai/mistral-small-creative", "context_window": 32768}
10
- {"model_name": "AllenAI: Olmo 3.1 32B Think", "model_id": "allenai/olmo-3.1-32b-think", "context_window": 65536}
11
- {"model_name": "Xiaomi: MiMo-V2-Flash", "model_id": "xiaomi/mimo-v2-flash", "context_window": 262144}
12
- {"model_name": "NVIDIA: Nemotron 3 Nano 30B A3B", "model_id": "nvidia/nemotron-3-nano-30b-a3b", "context_window": 262144}
13
- {"model_name": "OpenAI: GPT-5.2 Chat", "model_id": "openai/gpt-5.2-chat", "context_window": 128000}
14
- {"model_name": "OpenAI: GPT-5.2 Pro", "model_id": "openai/gpt-5.2-pro", "context_window": 400000}
15
- {"model_name": "OpenAI: GPT-5.2", "model_id": "openai/gpt-5.2", "context_window": 400000}
16
- {"model_name": "Relace: Relace Search", "model_id": "relace/relace-search", "context_window": 256000}
17
- {"model_name": "Z.AI: GLM 4.6V", "model_id": "z-ai/glm-4.6v", "context_window": 131072}
18
- {"model_name": "Nex AGI: DeepSeek V3.1 Nex N1", "model_id": "nex-agi/deepseek-v3.1-nex-n1", "context_window": 131072}
19
- {"model_name": "EssentialAI: Rnj 1 Instruct", "model_id": "essentialai/rnj-1-instruct", "context_window": 32768}
20
- {"model_name": "OpenAI: GPT-5.1-Codex-Max", "model_id": "openai/gpt-5.1-codex-max", "context_window": 400000}
21
- {"model_name": "Amazon: Nova 2 Lite", "model_id": "amazon/nova-2-lite-v1", "context_window": 1000000}
22
- {"model_name": "Mistral: Ministral 3 14B 2512", "model_id": "mistralai/ministral-14b-2512", "context_window": 262144}
23
- {"model_name": "Mistral: Ministral 3 8B 2512", "model_id": "mistralai/ministral-8b-2512", "context_window": 262144}
24
- {"model_name": "Mistral: Ministral 3 3B 2512", "model_id": "mistralai/ministral-3b-2512", "context_window": 131072}
25
- {"model_name": "Mistral: Mistral Large 3 2512", "model_id": "mistralai/mistral-large-2512", "context_window": 262144}
26
- {"model_name": "Arcee AI: Trinity Mini", "model_id": "arcee-ai/trinity-mini", "context_window": 131072}
27
- {"model_name": "DeepSeek: DeepSeek V3.2 Speciale", "model_id": "deepseek/deepseek-v3.2-speciale", "context_window": 163840}
28
- {"model_name": "DeepSeek: DeepSeek V3.2", "model_id": "deepseek/deepseek-v3.2", "context_window": 163840}
29
- {"model_name": "Prime Intellect: INTELLECT-3", "model_id": "prime-intellect/intellect-3", "context_window": 131072}
30
- {"model_name": "TNG: R1T Chimera", "model_id": "tngtech/tng-r1t-chimera", "context_window": 163840}
31
- {"model_name": "Anthropic: Claude Opus 4.5", "model_id": "anthropic/claude-opus-4.5", "context_window": 200000}
32
- {"model_name": "AllenAI: Olmo 3 32B Think", "model_id": "allenai/olmo-3-32b-think", "context_window": 65536}
33
- {"model_name": "AllenAI: Olmo 3 7B Instruct", "model_id": "allenai/olmo-3-7b-instruct", "context_window": 65536}
34
- {"model_name": "AllenAI: Olmo 3 7B Think", "model_id": "allenai/olmo-3-7b-think", "context_window": 65536}
35
- {"model_name": "xAI: Grok 4.1 Fast", "model_id": "x-ai/grok-4.1-fast", "context_window": 1048576}
36
- {"model_name": "Deep Cogito: Cogito v2.1 671B", "model_id": "deepcogito/cogito-v2.1-671b", "context_window": 128000}
37
- {"model_name": "OpenAI: GPT-5.1", "model_id": "openai/gpt-5.1", "context_window": 400000}
38
- {"model_name": "OpenAI: GPT-5.1 Chat", "model_id": "openai/gpt-5.1-chat", "context_window": 128000}
39
- {"model_name": "OpenAI: GPT-5.1-Codex", "model_id": "openai/gpt-5.1-codex", "context_window": 400000}
40
- {"model_name": "OpenAI: GPT-5.1-Codex-Mini", "model_id": "openai/gpt-5.1-codex-mini", "context_window": 400000}
41
- {"model_name": "Kwaipilot: KAT-Coder-Pro V1", "model_id": "kwaipilot/kat-coder-pro", "context_window": 256000}
42
- {"model_name": "MoonshotAI: Kimi K2 Thinking", "model_id": "moonshotai/kimi-k2-thinking", "context_window": 262144}
43
- {"model_name": "Amazon: Nova Premier 1.0", "model_id": "amazon/nova-premier-v1", "context_window": 1000000}
44
- {"model_name": "Perplexity: Sonar Pro Search", "model_id": "perplexity/sonar-pro-search", "context_window": 200000}
45
- {"model_name": "Mistral: Voxtral Small 24B 2507", "model_id": "mistralai/voxtral-small-24b-2507", "context_window": 32000}
46
- {"model_name": "OpenAI: gpt-oss-safeguard-20b", "model_id": "openai/gpt-oss-safeguard-20b", "context_window": 131072}
47
- {"model_name": "NVIDIA: Nemotron Nano 12B 2 VL", "model_id": "nvidia/nemotron-nano-12b-v2-vl", "context_window": 131072}
48
- {"model_name": "MiniMax: MiniMax M2", "model_id": "minimax/minimax-m2", "context_window": 196608}
49
- {"model_name": "Qwen: Qwen3 VL 32B Instruct", "model_id": "qwen/qwen3-vl-32b-instruct", "context_window": 262144}
50
- {"model_name": "LiquidAI/LFM2-8B-A1B", "model_id": "liquid/lfm2-8b-a1b", "context_window": 32768}
51
- {"model_name": "LiquidAI/LFM2-2.6B", "model_id": "liquid/lfm-2.2-6b", "context_window": 32768}
52
- {"model_name": "IBM: Granite 4.0 Micro", "model_id": "ibm-granite/granite-4.0-h-micro", "context_window": 131000}
53
- {"model_name": "Deep Cogito: Cogito V2 Preview Llama 405B", "model_id": "deepcogito/cogito-v2-preview-llama-405b", "context_window": 32768}
54
- {"model_name": "OpenAI: GPT-5 Image Mini", "model_id": "openai/gpt-5-image-mini", "context_window": 400000}
55
- {"model_name": "Anthropic: Claude Haiku 4.5", "model_id": "anthropic/claude-haiku-4.5", "context_window": 200000}
56
- {"model_name": "Qwen: Qwen3 VL 8B Thinking", "model_id": "qwen/qwen3-vl-8b-thinking", "context_window": 256000}
57
- {"model_name": "Qwen: Qwen3 VL 8B Instruct", "model_id": "qwen/qwen3-vl-8b-instruct", "context_window": 131072}
58
- {"model_name": "OpenAI: GPT-5 Image", "model_id": "openai/gpt-5-image", "context_window": 400000}
59
- {"model_name": "OpenAI: o3 Deep Research", "model_id": "openai/o3-deep-research", "context_window": 200000}
60
- {"model_name": "OpenAI: o4 Mini Deep Research", "model_id": "openai/o4-mini-deep-research", "context_window": 200000}
61
- {"model_name": "NVIDIA: Llama 3.3 Nemotron Super 49B V1.5", "model_id": "nvidia/llama-3.3-nemotron-super-49b-v1.5", "context_window": 131072}
62
- {"model_name": "Baidu: ERNIE 4.5 21B A3B Thinking", "model_id": "baidu/ernie-4.5-21b-a3b-thinking", "context_window": 131072}
63
- {"model_name": "Qwen: Qwen3 VL 30B A3B Thinking", "model_id": "qwen/qwen3-vl-30b-a3b-thinking", "context_window": 131072}
64
- {"model_name": "Qwen: Qwen3 VL 30B A3B Instruct", "model_id": "qwen/qwen3-vl-30b-a3b-instruct", "context_window": 262144}
65
- {"model_name": "OpenAI: GPT-5 Pro", "model_id": "openai/gpt-5-pro", "context_window": 400000}
66
- {"model_name": "Z.AI: GLM 4.6", "model_id": "z-ai/glm-4.6", "context_window": 202752}
67
- {"model_name": "Anthropic: Claude Sonnet 4.5", "model_id": "anthropic/claude-sonnet-4.5", "context_window": 1000000}
68
- {"model_name": "DeepSeek: DeepSeek V3.2 Exp", "model_id": "deepseek/deepseek-v3.2-exp", "context_window": 163840}
69
- {"model_name": "TheDrummer: Cydonia 24B V4.1", "model_id": "thedrummer/cydonia-24b-v4.1", "context_window": 131072}
70
- {"model_name": "Relace: Relace Apply 3", "model_id": "relace/relace-apply-3", "context_window": 256000}
71
- {"model_name": "Google: Gemini 2.5 Flash Preview 09-2025", "model_id": "google/gemini-2.5-flash-preview-09-2025", "context_window": 1048576}
72
- {"model_name": "Google: Gemini 2.5 Flash Lite Preview 09-2025", "model_id": "google/gemini-2.5-flash-lite-preview-09-2025", "context_window": 1048576}
73
- {"model_name": "Qwen: Qwen3 VL 235B A22B Thinking", "model_id": "qwen/qwen3-vl-235b-a22b-thinking", "context_window": 131072}
74
- {"model_name": "Qwen: Qwen3 VL 235B A22B Instruct", "model_id": "qwen/qwen3-vl-235b-a22b-instruct", "context_window": 262144}
75
- {"model_name": "Qwen: Qwen3 Max", "model_id": "qwen/qwen3-max", "context_window": 256000}
76
- {"model_name": "Qwen: Qwen3 Coder Plus", "model_id": "qwen/qwen3-coder-plus", "context_window": 128000}
77
- {"model_name": "OpenAI: GPT-5 Codex", "model_id": "openai/gpt-5-codex", "context_window": 400000}
78
- {"model_name": "DeepSeek: DeepSeek V3.1 Terminus", "model_id": "deepseek/deepseek-v3.1-terminus", "context_window": 163840}
79
- {"model_name": "xAI: Grok 4 Fast", "model_id": "x-ai/grok-4-fast", "context_window": 131072}
80
- {"model_name": "Qwen: Qwen3 Coder Flash", "model_id": "qwen/qwen3-coder-flash", "context_window": 128000}
81
- {"model_name": "OpenGVLab: InternVL3 78B", "model_id": "opengvlab/internvl3-78b", "context_window": 32768}
82
- {"model_name": "Qwen: Qwen3 Next 80B A3B Thinking", "model_id": "qwen/qwen3-next-80b-a3b-thinking", "context_window": 128000}
83
- {"model_name": "Qwen: Qwen3 Next 80B A3B Instruct", "model_id": "qwen/qwen3-next-80b-a3b-instruct", "context_window": 262144}
84
- {"model_name": "Meituan: LongCat Flash Chat", "model_id": "meituan/longcat-flash-chat", "context_window": 131072}
85
- {"model_name": "Qwen: Qwen Plus 0728", "model_id": "qwen/qwen-plus-2025-07-28", "context_window": 1000000}
86
- {"model_name": "Qwen: Qwen Plus 0728 (thinking)", "model_id": "qwen/qwen-plus-2025-07-28:thinking", "context_window": 1000000}
87
- {"model_name": "NVIDIA: Nemotron Nano 9B V2", "model_id": "nvidia/nemotron-nano-9b-v2", "context_window": 131072}
88
- {"model_name": "MoonshotAI: Kimi K2 0905", "model_id": "moonshotai/kimi-k2-0905", "context_window": 262144}
89
- {"model_name": "Deep Cogito: Cogito V2 Preview Llama 70B", "model_id": "deepcogito/cogito-v2-preview-llama-70b", "context_window": 32768}
90
- {"model_name": "Cogito V2 Preview Llama 109B", "model_id": "deepcogito/cogito-v2-preview-llama-109b-moe", "context_window": 32767}
91
- {"model_name": "StepFun: Step3", "model_id": "stepfun-ai/step3", "context_window": 65536}
92
- {"model_name": "Qwen: Qwen3 30B A3B Thinking 2507", "model_id": "qwen/qwen3-30b-a3b-thinking-2507", "context_window": 32768}
93
- {"model_name": "xAI: Grok Code Fast 1", "model_id": "x-ai/grok-code-fast-1", "context_window": 256000}
94
- {"model_name": "Nous: Hermes 4 70B", "model_id": "nousresearch/hermes-4-70b", "context_window": 131072}
95
- {"model_name": "Nous: Hermes 4 405B", "model_id": "nousresearch/hermes-4-405b", "context_window": 131072}
96
- {"model_name": "DeepSeek: DeepSeek V3.1", "model_id": "deepseek/deepseek-chat-v3.1", "context_window": 32768}
97
- {"model_name": "OpenAI: GPT-4o Audio", "model_id": "openai/gpt-4o-audio-preview", "context_window": 128000}
98
- {"model_name": "Mistral: Mistral Medium 3.1", "model_id": "mistralai/mistral-medium-3.1", "context_window": 131072}
99
- {"model_name": "Baidu: ERNIE 4.5 21B A3B", "model_id": "baidu/ernie-4.5-21b-a3b", "context_window": 120000}
100
- {"model_name": "Baidu: ERNIE 4.5 VL 28B A3B", "model_id": "baidu/ernie-4.5-vl-28b-a3b", "context_window": 30000}
101
- {"model_name": "Z.AI: GLM 4.5V", "model_id": "z-ai/glm-4.5v", "context_window": 65536}
102
- {"model_name": "AI21: Jamba Mini 1.7", "model_id": "ai21/jamba-mini-1.7", "context_window": 256000}
103
- {"model_name": "AI21: Jamba Large 1.7", "model_id": "ai21/jamba-large-1.7", "context_window": 256000}
104
- {"model_name": "OpenAI: GPT-5 Chat", "model_id": "openai/gpt-5-chat", "context_window": 128000}
105
- {"model_name": "OpenAI: GPT-5", "model_id": "openai/gpt-5", "context_window": 400000}
106
- {"model_name": "OpenAI: GPT-5 Mini", "model_id": "openai/gpt-5-mini", "context_window": 400000}
107
- {"model_name": "OpenAI: GPT-5 Nano", "model_id": "openai/gpt-5-nano", "context_window": 400000}
108
- {"model_name": "OpenAI: gpt-oss-120b", "model_id": "openai/gpt-oss-120b", "context_window": 131072}
109
- {"model_name": "OpenAI: gpt-oss-20b", "model_id": "openai/gpt-oss-20b", "context_window": 131072}
110
- {"model_name": "Anthropic: Claude Opus 4.1", "model_id": "anthropic/claude-opus-4.1", "context_window": 200000}
111
- {"model_name": "Mistral: Codestral 2508", "model_id": "mistralai/codestral-2508", "context_window": 256000}
112
- {"model_name": "Qwen: Qwen3 Coder 30B A3B Instruct", "model_id": "qwen/qwen3-coder-30b-a3b-instruct", "context_window": 160000}
113
- {"model_name": "Qwen: Qwen3 30B A3B Instruct 2507", "model_id": "qwen/qwen3-30b-a3b-instruct-2507", "context_window": 262144}
114
- {"model_name": "Z.AI: GLM 4.5", "model_id": "z-ai/glm-4.5", "context_window": 131072}
115
- {"model_name": "Z.AI: GLM 4.5 Air", "model_id": "z-ai/glm-4.5-air", "context_window": 131072}
116
- {"model_name": "Qwen: Qwen3 235B A22B Thinking 2507", "model_id": "qwen/qwen3-235b-a22b-thinking-2507", "context_window": 262144}
117
- {"model_name": "Z.AI: GLM 4 32B", "model_id": "z-ai/glm-4-32b", "context_window": 128000}
118
- {"model_name": "Qwen: Qwen3 Coder 480B A35B", "model_id": "qwen/qwen3-coder", "context_window": 262144}
119
- {"model_name": "ByteDance: UI-TARS 7B", "model_id": "bytedance/ui-tars-1.5-7b", "context_window": 128000}
120
- {"model_name": "Google: Gemini 2.5 Flash Lite", "model_id": "google/gemini-2.5-flash-lite", "context_window": 1048576}
121
- {"model_name": "Qwen: Qwen3 235B A22B Instruct 2507", "model_id": "qwen/qwen3-235b-a22b-2507", "context_window": 262144}
122
- {"model_name": "Switchpoint Router", "model_id": "switchpoint/router", "context_window": 131072}
123
- {"model_name": "MoonshotAI: Kimi K2 0711", "model_id": "moonshotai/kimi-k2", "context_window": 131072}
124
- {"model_name": "Mistral: Devstral Medium", "model_id": "mistralai/devstral-medium", "context_window": 131072}
125
- {"model_name": "Mistral: Devstral Small 1.1", "model_id": "mistralai/devstral-small", "context_window": 131072}
126
- {"model_name": "xAI: Grok 4", "model_id": "x-ai/grok-4", "context_window": 256000}
127
- {"model_name": "Tencent: Hunyuan A13B Instruct", "model_id": "tencent/hunyuan-a13b-instruct", "context_window": 131072}
128
- {"model_name": "TNG: DeepSeek R1T2 Chimera", "model_id": "tngtech/deepseek-r1t2-chimera", "context_window": 163840}
129
- {"model_name": "Morph: Morph V3 Large", "model_id": "morph/morph-v3-large", "context_window": 262144}
130
- {"model_name": "Morph: Morph V3 Fast", "model_id": "morph/morph-v3-fast", "context_window": 81920}
131
- {"model_name": "Baidu: ERNIE 4.5 VL 424B A47B", "model_id": "baidu/ernie-4.5-vl-424b-a47b", "context_window": 123000}
132
- {"model_name": "Baidu: ERNIE 4.5 300B A47B", "model_id": "baidu/ernie-4.5-300b-a47b", "context_window": 123000}
133
- {"model_name": "Inception: Mercury", "model_id": "inception/mercury", "context_window": 128000}
134
- {"model_name": "Mistral: Mistral Small 3.2 24B", "model_id": "mistralai/mistral-small-3.2-24b-instruct", "context_window": 131072}
135
- {"model_name": "MiniMax: MiniMax M1", "model_id": "minimax/minimax-m1", "context_window": 1000000}
136
- {"model_name": "Google: Gemini 2.5 Flash", "model_id": "google/gemini-2.5-flash", "context_window": 1048576}
137
- {"model_name": "Google: Gemini 2.5 Pro", "model_id": "google/gemini-2.5-pro", "context_window": 1048576}
138
- {"model_name": "MoonshotAI: Kimi Dev 72B", "model_id": "moonshotai/kimi-dev-72b", "context_window": 131072}
139
- {"model_name": "OpenAI: o3 Pro", "model_id": "openai/o3-pro", "context_window": 200000}
140
- {"model_name": "xAI: Grok 3 Mini", "model_id": "x-ai/grok-3-mini", "context_window": 131072}
141
- {"model_name": "xAI: Grok 3", "model_id": "x-ai/grok-3", "context_window": 131072}
142
- {"model_name": "Google: Gemini 2.5 Pro Preview 06-05", "model_id": "google/gemini-2.5-pro-preview", "context_window": 1048576}
143
- {"model_name": "DeepSeek: R1 0528", "model_id": "deepseek/deepseek-r1-0528", "context_window": 163840}
144
- {"model_name": "Anthropic: Claude Opus 4", "model_id": "anthropic/claude-opus-4", "context_window": 200000}
145
- {"model_name": "Anthropic: Claude Sonnet 4", "model_id": "anthropic/claude-sonnet-4", "context_window": 1000000}
146
- {"model_name": "Google: Gemma 3n 4B", "model_id": "google/gemma-3n-e4b-it", "context_window": 32768}
147
- {"model_name": "Nous: DeepHermes 3 Mistral 24B Preview", "model_id": "nousresearch/deephermes-3-mistral-24b-preview", "context_window": 32768}
148
- {"model_name": "Mistral: Mistral Medium 3", "model_id": "mistralai/mistral-medium-3", "context_window": 131072}
149
- {"model_name": "Google: Gemini 2.5 Pro Preview 05-06", "model_id": "google/gemini-2.5-pro-preview-05-06", "context_window": 1048576}
150
- {"model_name": "Inception: Mercury Coder", "model_id": "inception/mercury-coder", "context_window": 128000}
151
- {"model_name": "Meta: Llama Guard 4 12B", "model_id": "meta-llama/llama-guard-4-12b", "context_window": 163840}
152
- {"model_name": "Qwen: Qwen3 30B A3B", "model_id": "qwen/qwen3-30b-a3b", "context_window": 40960}
153
- {"model_name": "Qwen: Qwen3 8B", "model_id": "qwen/qwen3-8b", "context_window": 32000}
154
- {"model_name": "Qwen: Qwen3 14B", "model_id": "qwen/qwen3-14b", "context_window": 40960}
155
- {"model_name": "Qwen: Qwen3 32B", "model_id": "qwen/qwen3-32b", "context_window": 40960}
156
- {"model_name": "Qwen: Qwen3 235B A22B", "model_id": "qwen/qwen3-235b-a22b", "context_window": 40960}
157
- {"model_name": "TNG: DeepSeek R1T Chimera", "model_id": "tngtech/deepseek-r1t-chimera", "context_window": 163840}
158
- {"model_name": "OpenAI: o4 Mini High", "model_id": "openai/o4-mini-high", "context_window": 200000}
159
- {"model_name": "OpenAI: o3", "model_id": "openai/o3", "context_window": 200000}
160
- {"model_name": "OpenAI: o4 Mini", "model_id": "openai/o4-mini", "context_window": 200000}
161
- {"model_name": "Qwen: Qwen2.5 Coder 7B Instruct", "model_id": "qwen/qwen2.5-coder-7b-instruct", "context_window": 32768}
162
- {"model_name": "OpenAI: GPT-4.1", "model_id": "openai/gpt-4.1", "context_window": 1047576}
163
- {"model_name": "OpenAI: GPT-4.1 Mini", "model_id": "openai/gpt-4.1-mini", "context_window": 1047576}
164
- {"model_name": "OpenAI: GPT-4.1 Nano", "model_id": "openai/gpt-4.1-nano", "context_window": 1047576}
165
- {"model_name": "EleutherAI: Llemma 7b", "model_id": "eleutherai/llemma_7b", "context_window": 4096}
166
- {"model_name": "AlfredPros: CodeLLaMa 7B Instruct Solidity", "model_id": "alfredpros/codellama-7b-instruct-solidity", "context_window": 4096}
167
- {"model_name": "xAI: Grok 3 Mini Beta", "model_id": "x-ai/grok-3-mini-beta", "context_window": 131072}
168
- {"model_name": "xAI: Grok 3 Beta", "model_id": "x-ai/grok-3-beta", "context_window": 131072}
169
- {"model_name": "NVIDIA: Llama 3.1 Nemotron Ultra 253B v1", "model_id": "nvidia/llama-3.1-nemotron-ultra-253b-v1", "context_window": 131072}
170
- {"model_name": "Meta: Llama 4 Maverick", "model_id": "meta-llama/llama-4-maverick", "context_window": 1048576}
171
- {"model_name": "Meta: Llama 4 Scout", "model_id": "meta-llama/llama-4-scout", "context_window": 327680}
172
- {"model_name": "Qwen: Qwen2.5 VL 32B Instruct", "model_id": "qwen/qwen2.5-vl-32b-instruct", "context_window": 16384}
173
- {"model_name": "DeepSeek: DeepSeek V3 0324", "model_id": "deepseek/deepseek-chat-v3-0324", "context_window": 163840}
174
- {"model_name": "OpenAI: o1-pro", "model_id": "openai/o1-pro", "context_window": 200000}
175
- {"model_name": "Mistral: Mistral Small 3.1 24B", "model_id": "mistralai/mistral-small-3.1-24b-instruct", "context_window": 131072}
176
- {"model_name": "AllenAI: Olmo 2 32B Instruct", "model_id": "allenai/olmo-2-0325-32b-instruct", "context_window": 128000}
177
- {"model_name": "Google: Gemma 3 4B", "model_id": "google/gemma-3-4b-it", "context_window": 96000}
178
- {"model_name": "Google: Gemma 3 12B", "model_id": "google/gemma-3-12b-it", "context_window": 131072}
179
- {"model_name": "Cohere: Command A", "model_id": "cohere/command-a", "context_window": 256000}
180
- {"model_name": "OpenAI: GPT-4o-mini Search Preview", "model_id": "openai/gpt-4o-mini-search-preview", "context_window": 128000}
181
- {"model_name": "OpenAI: GPT-4o Search Preview", "model_id": "openai/gpt-4o-search-preview", "context_window": 128000}
182
- {"model_name": "Google: Gemma 3 27B", "model_id": "google/gemma-3-27b-it", "context_window": 96000}
183
- {"model_name": "TheDrummer: Skyfall 36B V2", "model_id": "thedrummer/skyfall-36b-v2", "context_window": 32768}
184
- {"model_name": "Perplexity: Sonar Reasoning Pro", "model_id": "perplexity/sonar-reasoning-pro", "context_window": 128000}
185
- {"model_name": "Perplexity: Sonar Pro", "model_id": "perplexity/sonar-pro", "context_window": 200000}
186
- {"model_name": "Perplexity: Sonar Deep Research", "model_id": "perplexity/sonar-deep-research", "context_window": 128000}
187
- {"model_name": "Qwen: QwQ 32B", "model_id": "qwen/qwq-32b", "context_window": 32768}
188
- {"model_name": "Google: Gemini 2.0 Flash Lite", "model_id": "google/gemini-2.0-flash-lite-001", "context_window": 1048576}
189
- {"model_name": "Anthropic: Claude 3.7 Sonnet (thinking)", "model_id": "anthropic/claude-3.7-sonnet:thinking", "context_window": 200000}
190
- {"model_name": "Anthropic: Claude 3.7 Sonnet", "model_id": "anthropic/claude-3.7-sonnet", "context_window": 200000}
191
- {"model_name": "Mistral: Saba", "model_id": "mistralai/mistral-saba", "context_window": 32768}
192
- {"model_name": "Llama Guard 3 8B", "model_id": "meta-llama/llama-guard-3-8b", "context_window": 131072}
193
- {"model_name": "OpenAI: o3 Mini High", "model_id": "openai/o3-mini-high", "context_window": 200000}
194
- {"model_name": "Google: Gemini 2.0 Flash", "model_id": "google/gemini-2.0-flash-001", "context_window": 1048576}
195
- {"model_name": "Qwen: Qwen VL Plus", "model_id": "qwen/qwen-vl-plus", "context_window": 7500}
196
- {"model_name": "AionLabs: Aion-1.0", "model_id": "aion-labs/aion-1.0", "context_window": 131072}
197
- {"model_name": "AionLabs: Aion-1.0-Mini", "model_id": "aion-labs/aion-1.0-mini", "context_window": 131072}
198
- {"model_name": "Qwen: Qwen VL Max", "model_id": "qwen/qwen-vl-max", "context_window": 131072}
199
- {"model_name": "Qwen: Qwen-Turbo", "model_id": "qwen/qwen-turbo", "context_window": 1000000}
200
- {"model_name": "Qwen: Qwen2.5 VL 72B Instruct", "model_id": "qwen/qwen2.5-vl-72b-instruct", "context_window": 32768}
201
- {"model_name": "Qwen: Qwen-Plus", "model_id": "qwen/qwen-plus", "context_window": 131072}
202
- {"model_name": "Qwen: Qwen-Max", "model_id": "qwen/qwen-max", "context_window": 32768}
203
- {"model_name": "OpenAI: o3 Mini", "model_id": "openai/o3-mini", "context_window": 200000}
204
- {"model_name": "Mistral: Mistral Small 3", "model_id": "mistralai/mistral-small-24b-instruct-2501", "context_window": 32768}
205
- {"model_name": "DeepSeek: R1 Distill Qwen 32B", "model_id": "deepseek/deepseek-r1-distill-qwen-32b", "context_window": 32768}
206
- {"model_name": "Perplexity: Sonar", "model_id": "perplexity/sonar", "context_window": 127072}
207
- {"model_name": "DeepSeek: R1 Distill Llama 70B", "model_id": "deepseek/deepseek-r1-distill-llama-70b", "context_window": 131072}
208
- {"model_name": "DeepSeek: R1", "model_id": "deepseek/deepseek-r1", "context_window": 64000}
209
- {"model_name": "MiniMax: MiniMax-01", "model_id": "minimax/minimax-01", "context_window": 1000192}
210
- {"model_name": "Microsoft: Phi 4", "model_id": "microsoft/phi-4", "context_window": 16384}
211
- {"model_name": "Sao10K: Llama 3.1 70B Hanami x1", "model_id": "sao10k/l3.1-70b-hanami-x1", "context_window": 16000}
212
- {"model_name": "DeepSeek: DeepSeek V3", "model_id": "deepseek/deepseek-chat", "context_window": 163840}
213
- {"model_name": "Sao10K: Llama 3.3 Euryale 70B", "model_id": "sao10k/l3.3-euryale-70b", "context_window": 131072}
214
- {"model_name": "OpenAI: o1", "model_id": "openai/o1", "context_window": 200000}
215
- {"model_name": "Meta: Llama 3.3 70B Instruct", "model_id": "meta-llama/llama-3.3-70b-instruct", "context_window": 131072}
216
- {"model_name": "Amazon: Nova Lite 1.0", "model_id": "amazon/nova-lite-v1", "context_window": 300000}
217
- {"model_name": "Amazon: Nova Micro 1.0", "model_id": "amazon/nova-micro-v1", "context_window": 128000}
218
- {"model_name": "Amazon: Nova Pro 1.0", "model_id": "amazon/nova-pro-v1", "context_window": 300000}
219
- {"model_name": "Mistral: Pixtral Large 2411", "model_id": "mistralai/pixtral-large-2411", "context_window": 131072}
220
- {"model_name": "Qwen2.5 Coder 32B Instruct", "model_id": "qwen/qwen-2.5-coder-32b-instruct", "context_window": 32768}
221
- {"model_name": "SorcererLM 8x22B", "model_id": "raifle/sorcererlm-8x22b", "context_window": 16000}
222
- {"model_name": "TheDrummer: UnslopNemo 12B", "model_id": "thedrummer/unslopnemo-12b", "context_window": 32768}
223
- {"model_name": "Anthropic: Claude 3.5 Haiku", "model_id": "anthropic/claude-3.5-haiku", "context_window": 200000}
224
- {"model_name": "Magnum v4 72B", "model_id": "anthracite-org/magnum-v4-72b", "context_window": 16384}
225
- {"model_name": "Anthropic: Claude 3.5 Sonnet", "model_id": "anthropic/claude-3.5-sonnet", "context_window": 200000}
226
- {"model_name": "Mistral: Ministral 8B", "model_id": "mistralai/ministral-8b", "context_window": 131072}
227
- {"model_name": "Mistral: Ministral 3B", "model_id": "mistralai/ministral-3b", "context_window": 131072}
228
- {"model_name": "Qwen: Qwen2.5 7B Instruct", "model_id": "qwen/qwen-2.5-7b-instruct", "context_window": 32768}
229
- {"model_name": "NVIDIA: Llama 3.1 Nemotron 70B Instruct", "model_id": "nvidia/llama-3.1-nemotron-70b-instruct", "context_window": 131072}
230
- {"model_name": "Inflection: Inflection 3 Pi", "model_id": "inflection/inflection-3-pi", "context_window": 8000}
231
- {"model_name": "Inflection: Inflection 3 Productivity", "model_id": "inflection/inflection-3-productivity", "context_window": 8000}
232
- {"model_name": "TheDrummer: Rocinante 12B", "model_id": "thedrummer/rocinante-12b", "context_window": 32768}
233
- {"model_name": "Meta: Llama 3.2 3B Instruct", "model_id": "meta-llama/llama-3.2-3b-instruct", "context_window": 131072}
234
- {"model_name": "Meta: Llama 3.2 1B Instruct", "model_id": "meta-llama/llama-3.2-1b-instruct", "context_window": 60000}
235
- {"model_name": "Meta: Llama 3.2 11B Vision Instruct", "model_id": "meta-llama/llama-3.2-11b-vision-instruct", "context_window": 131072}
236
- {"model_name": "Qwen2.5 72B Instruct", "model_id": "qwen/qwen-2.5-72b-instruct", "context_window": 32768}
237
- {"model_name": "Mistral: Pixtral 12B", "model_id": "mistralai/pixtral-12b", "context_window": 32768}
238
- {"model_name": "Sao10K: Llama 3.1 Euryale 70B v2.2", "model_id": "sao10k/l3.1-euryale-70b", "context_window": 32768}
239
- {"model_name": "Qwen: Qwen2.5-VL 7B Instruct", "model_id": "qwen/qwen-2.5-vl-7b-instruct", "context_window": 32768}
240
- {"model_name": "Nous: Hermes 3 70B Instruct", "model_id": "nousresearch/hermes-3-llama-3.1-70b", "context_window": 65536}
241
- {"model_name": "Nous: Hermes 3 405B Instruct", "model_id": "nousresearch/hermes-3-llama-3.1-405b", "context_window": 131072}
242
- {"model_name": "OpenAI: ChatGPT-4o", "model_id": "openai/chatgpt-4o-latest", "context_window": 128000}
243
- {"model_name": "Sao10K: Llama 3 8B Lunaris", "model_id": "sao10k/l3-lunaris-8b", "context_window": 8192}
244
- {"model_name": "Meta: Llama 3.1 8B Instruct", "model_id": "meta-llama/llama-3.1-8b-instruct", "context_window": 16384}
245
- {"model_name": "Meta: Llama 3.1 405B Instruct", "model_id": "meta-llama/llama-3.1-405b-instruct", "context_window": 10000}
246
- {"model_name": "Meta: Llama 3.1 70B Instruct", "model_id": "meta-llama/llama-3.1-70b-instruct", "context_window": 131072}
247
- {"model_name": "Mistral: Mistral Nemo", "model_id": "mistralai/mistral-nemo", "context_window": 131072}
248
- {"model_name": "OpenAI: GPT-4o-mini", "model_id": "openai/gpt-4o-mini", "context_window": 128000}
249
- {"model_name": "Google: Gemma 2 27B", "model_id": "google/gemma-2-27b-it", "context_window": 8192}
250
- {"model_name": "Google: Gemma 2 9B", "model_id": "google/gemma-2-9b-it", "context_window": 8192}
251
- {"model_name": "Sao10k: Llama 3 Euryale 70B v2.1", "model_id": "sao10k/l3-euryale-70b", "context_window": 8192}
252
- {"model_name": "NousResearch: Hermes 2 Pro - Llama-3 8B", "model_id": "nousresearch/hermes-2-pro-llama-3-8b", "context_window": 8192}
253
- {"model_name": "Mistral: Mistral 7B Instruct", "model_id": "mistralai/mistral-7b-instruct", "context_window": 32768}
254
- {"model_name": "Mistral: Mistral 7B Instruct v0.3", "model_id": "mistralai/mistral-7b-instruct-v0.3", "context_window": 32768}
255
- {"model_name": "Meta: LlamaGuard 2 8B", "model_id": "meta-llama/llama-guard-2-8b", "context_window": 8192}
256
- {"model_name": "OpenAI: GPT-4o", "model_id": "openai/gpt-4o", "context_window": 128000}
257
- {"model_name": "Meta: Llama 3 70B Instruct", "model_id": "meta-llama/llama-3-70b-instruct", "context_window": 8192}
258
- {"model_name": "Meta: Llama 3 8B Instruct", "model_id": "meta-llama/llama-3-8b-instruct", "context_window": 8192}
259
- {"model_name": "Mistral: Mixtral 8x22B Instruct", "model_id": "mistralai/mixtral-8x22b-instruct", "context_window": 65536}
260
- {"model_name": "WizardLM-2 8x22B", "model_id": "microsoft/wizardlm-2-8x22b", "context_window": 65536}
261
- {"model_name": "OpenAI: GPT-4 Turbo", "model_id": "openai/gpt-4-turbo", "context_window": 128000}
262
- {"model_name": "Anthropic: Claude 3 Haiku", "model_id": "anthropic/claude-3-haiku", "context_window": 200000}
263
- {"model_name": "Mistral: Mistral Large", "model_id": "mistralai/mistral-large", "context_window": 128000}
264
- {"model_name": "OpenAI: GPT-4 Turbo Preview", "model_id": "openai/gpt-4-turbo-preview", "context_window": 128000}
265
- {"model_name": "Mistral Tiny", "model_id": "mistralai/mistral-tiny", "context_window": 32768}
266
- {"model_name": "Mistral: Mistral 7B Instruct v0.2", "model_id": "mistralai/mistral-7b-instruct-v0.2", "context_window": 32768}
267
- {"model_name": "Mistral: Mixtral 8x7B Instruct", "model_id": "mistralai/mixtral-8x7b-instruct", "context_window": 32768}
268
- {"model_name": "Noromaid 20B", "model_id": "neversleep/noromaid-20b", "context_window": 4096}
269
- {"model_name": "Goliath 120B", "model_id": "alpindale/goliath-120b", "context_window": 6144}
270
- {"model_name": "OpenAI: GPT-3.5 Turbo Instruct", "model_id": "openai/gpt-3.5-turbo-instruct", "context_window": 4095}
271
- {"model_name": "Mistral: Mistral 7B Instruct v0.1", "model_id": "mistralai/mistral-7b-instruct-v0.1", "context_window": 2824}
272
- {"model_name": "OpenAI: GPT-3.5 Turbo 16k", "model_id": "openai/gpt-3.5-turbo-16k", "context_window": 16385}
273
- {"model_name": "ReMM SLERP 13B", "model_id": "undi95/remm-slerp-l2-13b", "context_window": 6144}
274
- {"model_name": "MythoMax 13B", "model_id": "gryphe/mythomax-l2-13b", "context_window": 4096}
275
- {"model_name": "OpenAI: GPT-4", "model_id": "openai/gpt-4", "context_window": 8191}
276
- {"model_name": "OpenAI: GPT-3.5 Turbo", "model_id": "openai/gpt-3.5-turbo", "context_window": 16385}
277
- {"model_name": "Sherlock Dash Alpha", "model_id": "openrouter/sherlock-dash-alpha", "context_window": 1840000}
278
- {"model_name": "Sherlock Think Alpha", "model_id": "openrouter/sherlock-think-alpha", "context_window": 1840000}
279
- {"model_name": "Polaris Alpha", "model_id": "openrouter/polaris-alpha", "context_window": 256000}
280
- {"model_name": "Andromeda Alpha", "model_id": "openrouter/andromeda-alpha", "context_window": 128000}
281
- {"model_name": "Arcee AI: AFM 4.5B", "model_id": "arcee-ai/afm-4.5b", "context_window": 65536}
282
- {"model_name": "ByteDance: Seed OSS 36B Instruct", "model_id": "bytedance/seed-oss-36b-instruct", "context_window": 131072}
283
- {"model_name": "Deep Cogito: Cogito V2 Preview Deepseek 671B", "model_id": "deepcogito/cogito-v2-preview-deepseek-671b", "context_window": 131072}
284
- {"model_name": "DeepSeek: DeepSeek V3.1 Base", "model_id": "deepseek/deepseek-v3.1-base", "context_window": 163840}
285
- {"model_name": "THUDM: GLM 4.1V 9B Thinking", "model_id": "thudm/glm-4.1v-9b-thinking", "context_window": 65536}
286
- {"model_name": "Cypher Alpha", "model_id": "openrouter/cypher-alpha", "context_window": 1000000}
287
- {"model_name": "Morph: Fast Apply", "model_id": "morph/morph-v2", "context_window": 32000}
288
- {"model_name": "Mistral: Magistral Small 2506", "model_id": "mistralai/magistral-small-2506", "context_window": 40000}
289
- {"model_name": "Mistral: Magistral Medium 2506", "model_id": "mistralai/magistral-medium-2506", "context_window": 40960}
290
- {"model_name": "SentientAGI: Dobby Mini Plus Llama 3.1 8B", "model_id": "sentientagi/dobby-mini-unhinged-plus-llama-3.1-8b", "context_window": 131072}
291
- {"model_name": "DeepSeek: R1 Distill Qwen 7B", "model_id": "deepseek/deepseek-r1-distill-qwen-7b", "context_window": 131072}
292
- {"model_name": "DeepSeek: DeepSeek R1 0528 Qwen3 8B", "model_id": "deepseek/deepseek-r1-0528-qwen3-8b", "context_window": 131072}
293
- {"model_name": "Google: Gemma 1 2B", "model_id": "google/gemma-2b-it", "context_window": 8192}
294
- {"model_name": "Sarvam AI: Sarvam-M", "model_id": "sarvamai/sarvam-m", "context_window": 32768}
295
- {"model_name": "TheDrummer: Valkyrie 49B V1", "model_id": "thedrummer/valkyrie-49b-v1", "context_window": 131072}
296
- {"model_name": "Mistral: Devstral Small 2505", "model_id": "mistralai/devstral-small-2505", "context_window": 131072}
297
- {"model_name": "OpenAI: Codex Mini", "model_id": "openai/codex-mini", "context_window": 200000}
298
- {"model_name": "Meta: Llama 3.3 8B Instruct", "model_id": "meta-llama/llama-3.3-8b-instruct", "context_window": 128000}
299
- {"model_name": "Arcee AI: Caller Large", "model_id": "arcee-ai/caller-large", "context_window": 32768}
300
- {"model_name": "Arcee AI: Virtuoso Medium V2", "model_id": "arcee-ai/virtuoso-medium-v2", "context_window": 131072}
301
- {"model_name": "Arcee AI: Arcee Blitz", "model_id": "arcee-ai/arcee-blitz", "context_window": 32768}
302
- {"model_name": "Qwen: Qwen3 0.6B", "model_id": "qwen/qwen3-0.6b-04-28", "context_window": 32000}
303
- {"model_name": "Qwen: Qwen3 1.7B", "model_id": "qwen/qwen3-1.7b", "context_window": 32000}
304
- {"model_name": "OpenGVLab: InternVL3 14B", "model_id": "opengvlab/internvl3-14b", "context_window": 32000}
305
- {"model_name": "OpenGVLab: InternVL3 2B", "model_id": "opengvlab/internvl3-2b", "context_window": 32000}
306
- {"model_name": "DeepSeek: DeepSeek Prover V2", "model_id": "deepseek/deepseek-prover-v2", "context_window": 163840}
307
- {"model_name": "THUDM: GLM Z1 Rumination 32B", "model_id": "thudm/glm-z1-rumination-32b", "context_window": 32000}
308
- {"model_name": "THUDM: GLM Z1 9B", "model_id": "thudm/glm-z1-9b", "context_window": 32000}
309
- {"model_name": "THUDM: GLM 4 9B", "model_id": "thudm/glm-4-9b", "context_window": 32000}
310
- {"model_name": "Microsoft: MAI DS R1", "model_id": "microsoft/mai-ds-r1", "context_window": 163840}
311
- {"model_name": "THUDM: GLM Z1 32B", "model_id": "thudm/glm-z1-32b", "context_window": 32768}
312
- {"model_name": "THUDM: GLM 4 32B", "model_id": "thudm/glm-4-32b", "context_window": 32768}
313
- {"model_name": "ArliAI: QwQ 32B RpR v1", "model_id": "arliai/qwq-32b-arliai-rpr-v1", "context_window": 32768}
314
- {"model_name": "Agentica: Deepcoder 14B Preview", "model_id": "agentica-org/deepcoder-14b-preview", "context_window": 96000}
315
- {"model_name": "MoonshotAI: Kimi VL A3B Thinking", "model_id": "moonshotai/kimi-vl-a3b-thinking", "context_window": 131072}
316
- {"model_name": "Optimus Alpha", "model_id": "openrouter/optimus-alpha", "context_window": 1000000}
317
- {"model_name": "NVIDIA: Llama 3.1 Nemotron Nano 8B v1", "model_id": "nvidia/llama-3.1-nemotron-nano-8b-v1", "context_window": 131072}
318
- {"model_name": "NVIDIA: Llama 3.3 Nemotron Super 49B v1", "model_id": "nvidia/llama-3.3-nemotron-super-49b-v1", "context_window": 131072}
319
- {"model_name": "Swallow: Llama 3.1 Swallow 8B Instruct V0.3", "model_id": "tokyotech-llm/llama-3.1-swallow-8b-instruct-v0.3", "context_window": 16384}
320
- {"model_name": "Quasar Alpha", "model_id": "openrouter/quasar-alpha", "context_window": 1000000}
321
- {"model_name": "OpenHands LM 32B V0.1", "model_id": "all-hands/openhands-lm-32b-v0.1", "context_window": 131072}
322
- {"model_name": "DeepSeek: DeepSeek V3 Base", "model_id": "deepseek/deepseek-v3-base", "context_window": 131072}
323
- {"model_name": "Typhoon2 8B Instruct", "model_id": "scb10x/llama3.1-typhoon2-8b-instruct", "context_window": 8192}
324
- {"model_name": "Typhoon2 70B Instruct", "model_id": "scb10x/llama3.1-typhoon2-70b-instruct", "context_window": 8192}
325
- {"model_name": "Bytedance: UI-TARS 72B", "model_id": "bytedance-research/ui-tars-72b", "context_window": 32768}
326
- {"model_name": "Qwen: Qwen2.5 VL 3B Instruct", "model_id": "qwen/qwen2.5-vl-3b-instruct", "context_window": 64000}
327
- {"model_name": "Google: Gemini 2.5 Pro Experimental", "model_id": "google/gemini-2.5-pro-exp-03-25", "context_window": 1048576}
328
- {"model_name": "Qrwkv 72B", "model_id": "featherless/qwerky-72b", "context_window": 32768}
329
- {"model_name": "OlympicCoder 32B", "model_id": "open-r1/olympiccoder-32b", "context_window": 32768}
330
- {"model_name": "SteelSkull: L3.3 Electra R1 70B", "model_id": "steelskull/l3.3-electra-r1-70b", "context_window": 128000}
331
- {"model_name": "Google: Gemma 3 1B", "model_id": "google/gemma-3-1b-it", "context_window": 32000}
332
- {"model_name": "AI21: Jamba 1.6 Large", "model_id": "ai21/jamba-1.6-large", "context_window": 256000}
333
- {"model_name": "AI21: Jamba Mini 1.6", "model_id": "ai21/jamba-1.6-mini", "context_window": 256000}
334
- {"model_name": "Reka: Flash 3", "model_id": "rekaai/reka-flash-3", "context_window": 32000}
335
- {"model_name": "LatitudeGames: Wayfarer Large 70B Llama 3.3", "model_id": "latitudegames/wayfarer-large-70b-llama-3.3", "context_window": 128000}
336
- {"model_name": "Microsoft: Phi 4 Multimodal Instruct", "model_id": "microsoft/phi-4-multimodal-instruct", "context_window": 131072}
337
- {"model_name": "DeepSeek: DeepSeek R1 Zero", "model_id": "deepseek/deepseek-r1-zero", "context_window": 163840}
338
- {"model_name": "Qwen: Qwen2.5 32B Instruct", "model_id": "qwen/qwen2.5-32b-instruct", "context_window": 131072}
339
- {"model_name": "MoonshotAI: Moonlight 16B A3B Instruct", "model_id": "moonshotai/moonlight-16b-a3b-instruct", "context_window": 8192}
340
- {"model_name": "Nous: DeepHermes 3 Llama 3 8B Preview", "model_id": "nousresearch/deephermes-3-llama-3-8b-preview", "context_window": 131072}
341
- {"model_name": "Perplexity: R1 1776", "model_id": "perplexity/r1-1776", "context_window": 128000}
342
- {"model_name": "Dolphin3.0 R1 Mistral 24B", "model_id": "cognitivecomputations/dolphin3.0-r1-mistral-24b", "context_window": 32768}
343
- {"model_name": "Dolphin3.0 Mistral 24B", "model_id": "cognitivecomputations/dolphin3.0-mistral-24b", "context_window": 32768}
344
- {"model_name": "DeepSeek: R1 Distill Qwen 1.5B", "model_id": "deepseek/deepseek-r1-distill-qwen-1.5b", "context_window": 131072}
345
- {"model_name": "DeepSeek: R1 Distill Qwen 14B", "model_id": "deepseek/deepseek-r1-distill-qwen-14b", "context_window": 131072}
346
- {"model_name": "Perplexity: Sonar Reasoning", "model_id": "perplexity/sonar-reasoning", "context_window": 127000}
347
- {"model_name": "Liquid: LFM 7B", "model_id": "liquid/lfm-7b", "context_window": 32768}
348
- {"model_name": "Liquid: LFM 3B", "model_id": "liquid/lfm-3b", "context_window": 32768}
349
- {"model_name": "Mistral: Codestral 2501", "model_id": "mistralai/codestral-2501", "context_window": 256000}
350
- {"model_name": "Inflatebot: Mag Mell R1 12B", "model_id": "inflatebot/mn-mag-mell-r1", "context_window": 32000}
351
- {"model_name": "EVA Llama 3.33 70B", "model_id": "eva-unit-01/eva-llama-3.33-70b", "context_window": 16384}
352
- {"model_name": "xAI: Grok 2 Vision 1212", "model_id": "x-ai/grok-2-vision-1212", "context_window": 32768}
353
- {"model_name": "xAI: Grok 2 1212", "model_id": "x-ai/grok-2-1212", "context_window": 131072}
354
- {"model_name": "Qwen: QwQ 32B Preview", "model_id": "qwen/qwq-32b-preview", "context_window": 32768}
355
- {"model_name": "EVA Qwen2.5 72B", "model_id": "eva-unit-01/eva-qwen-2.5-72b", "context_window": 32000}
356
- {"model_name": "xAI: Grok Vision Beta", "model_id": "x-ai/grok-vision-beta", "context_window": 8192}
357
- {"model_name": "Infermatic: Mistral Nemo Inferor 12B", "model_id": "infermatic/mn-inferor-12b", "context_window": 32000}
358
- {"model_name": "EVA Qwen2.5 32B", "model_id": "eva-unit-01/eva-qwen-2.5-32b", "context_window": 32000}
359
- {"model_name": "xAI: Grok Beta", "model_id": "x-ai/grok-beta", "context_window": 131072}
360
- {"model_name": "xAI: Grok 2", "model_id": "x-ai/grok-2", "context_window": 32768}
361
- {"model_name": "xAI: Grok 2 mini", "model_id": "x-ai/grok-2-mini", "context_window": 32768}
362
- {"model_name": "Google: Gemini 1.5 Flash 8B", "model_id": "google/gemini-flash-1.5-8b", "context_window": 1000000}
363
- {"model_name": "Liquid: LFM 40B MoE", "model_id": "liquid/lfm-40b", "context_window": 32768}
364
- {"model_name": "EVA Qwen2.5 14B", "model_id": "eva-unit-01/eva-qwen-2.5-14b", "context_window": 32768}
365
- {"model_name": "Magnum v2 72B", "model_id": "anthracite-org/magnum-v2-72b", "context_window": 32768}
366
- {"model_name": "Meta: Llama 3.2 90B Vision Instruct", "model_id": "meta-llama/llama-3.2-90b-vision-instruct", "context_window": 131072}
367
- {"model_name": "OpenAI: o1-mini", "model_id": "openai/o1-mini", "context_window": 128000}
368
- {"model_name": "OpenAI: o1-preview", "model_id": "openai/o1-preview", "context_window": 128000}
369
- {"model_name": "Google: Gemini 1.5 Flash Experimental", "model_id": "google/gemini-flash-1.5-exp", "context_window": 1000000}
370
- {"model_name": "Lynn: Llama 3 Soliloquy 7B v3 32K", "model_id": "lynn/soliloquy-v3", "context_window": 32768}
371
- {"model_name": "AI21: Jamba 1.5 Mini", "model_id": "ai21/jamba-1-5-mini", "context_window": 256000}
372
- {"model_name": "Yi 1.5 34B Chat", "model_id": "01-ai/yi-1.5-34b-chat", "context_window": 4096}
373
- {"model_name": "AI21: Jamba 1.5 Large", "model_id": "ai21/jamba-1-5-large", "context_window": 256000}
374
- {"model_name": "Microsoft: Phi-3.5 Mini 128K Instruct", "model_id": "microsoft/phi-3.5-mini-128k-instruct", "context_window": 128000}
375
- {"model_name": "Aetherwiing: Starcannon 12B", "model_id": "aetherwiing/mn-starcannon-12b", "context_window": 12000}
376
- {"model_name": "01.AI: Yi Vision", "model_id": "01-ai/yi-vision", "context_window": 16384}
377
- {"model_name": "01.AI: Yi Large FC", "model_id": "01-ai/yi-large-fc", "context_window": 16384}
378
- {"model_name": "01.AI: Yi Large Turbo", "model_id": "01-ai/yi-large-turbo", "context_window": 4096}
379
- {"model_name": "Mistral Nemo 12B Celeste", "model_id": "nothingiisreal/mn-celeste-12b", "context_window": 32000}
380
- {"model_name": "Perplexity: Llama 3.1 Sonar 70B Online", "model_id": "perplexity/llama-3.1-sonar-large-128k-online", "context_window": 127072}
381
- {"model_name": "Perplexity: Llama 3.1 Sonar 8B Online", "model_id": "perplexity/llama-3.1-sonar-small-128k-online", "context_window": 127072}
382
- {"model_name": "Google: Gemini 1.5 Pro Experimental", "model_id": "google/gemini-pro-1.5-exp", "context_window": 1000000}
383
- {"model_name": "Dolphin Llama 3 70B \ud83d\udc2c", "model_id": "cognitivecomputations/dolphin-llama-3-70b", "context_window": 8192}
384
- {"model_name": "Mistral: Codestral Mamba", "model_id": "mistralai/codestral-mamba", "context_window": 256000}
385
- {"model_name": "Qwen 2 7B Instruct", "model_id": "qwen/qwen-2-7b-instruct", "context_window": 32768}
386
- {"model_name": "Magnum 72B", "model_id": "alpindale/magnum-72b", "context_window": 16384}
387
- {"model_name": "Nous: Hermes 2 Theta 8B", "model_id": "nousresearch/hermes-2-theta-llama-3-8b", "context_window": 16384}
388
- {"model_name": "Sao10K: Llama 3 Stheno 8B v3.3 32K", "model_id": "sao10k/l3-stheno-8b", "context_window": 32000}
389
- {"model_name": "AI21: Jamba Instruct", "model_id": "ai21/jamba-instruct", "context_window": 256000}
390
- {"model_name": "01.AI: Yi Large", "model_id": "01-ai/yi-large", "context_window": 32768}
391
- {"model_name": "NVIDIA: Nemotron-4 340B Instruct", "model_id": "nvidia/nemotron-4-340b-instruct", "context_window": 4096}
392
- {"model_name": "Microsoft: Phi-3 Medium 4K Instruct", "model_id": "microsoft/phi-3-medium-4k-instruct", "context_window": 4000}
393
- {"model_name": "StarCoder2 15B Instruct", "model_id": "bigcode/starcoder2-15b-instruct", "context_window": 16384}
394
- {"model_name": "Dolphin 2.9.2 Mixtral 8x22B \ud83d\udc2c", "model_id": "cognitivecomputations/dolphin-mixtral-8x22b", "context_window": 65536}
395
- {"model_name": "Qwen 2 72B Instruct", "model_id": "qwen/qwen-2-72b-instruct", "context_window": 32768}
396
- {"model_name": "OpenChat 3.6 8B", "model_id": "openchat/openchat-8b", "context_window": 8192}
397
- {"model_name": "Microsoft: Phi-3 Mini 128K Instruct", "model_id": "microsoft/phi-3-mini-128k-instruct", "context_window": 128000}
398
- {"model_name": "Microsoft: Phi-3 Medium 128K Instruct", "model_id": "microsoft/phi-3-medium-128k-instruct", "context_window": 128000}
399
- {"model_name": "NeverSleep: Llama 3 Lumimaid 70B", "model_id": "neversleep/llama-3-lumimaid-70b", "context_window": 8192}
400
- {"model_name": "Perplexity: Llama3 Sonar 70B", "model_id": "perplexity/llama-3-sonar-large-32k-chat", "context_window": 32768}
401
- {"model_name": "Perplexity: Llama3 Sonar 8B Online", "model_id": "perplexity/llama-3-sonar-small-32k-online", "context_window": 28000}
402
- {"model_name": "Perplexity: Llama3 Sonar 8B", "model_id": "perplexity/llama-3-sonar-small-32k-chat", "context_window": 32768}
403
- {"model_name": "DeepSeek V2.5", "model_id": "deepseek/deepseek-chat-v2.5", "context_window": 128000}
404
- {"model_name": "Perplexity: Llama3 Sonar 70B Online", "model_id": "perplexity/llama-3-sonar-large-32k-online", "context_window": 28000}
405
- {"model_name": "Google: Gemini 1.5 Flash", "model_id": "google/gemini-flash-1.5", "context_window": 1000000}
406
- {"model_name": "LLaVA v1.6 34B", "model_id": "liuhaotian/llava-yi-34b", "context_window": 4096}
407
- {"model_name": "OLMo 7B Instruct", "model_id": "allenai/olmo-7b-instruct", "context_window": 2048}
408
- {"model_name": "Qwen 1.5 4B Chat", "model_id": "qwen/qwen-4b-chat", "context_window": 32768}
409
- {"model_name": "Qwen 1.5 7B Chat", "model_id": "qwen/qwen-7b-chat", "context_window": 32768}
410
- {"model_name": "Qwen 1.5 14B Chat", "model_id": "qwen/qwen-14b-chat", "context_window": 32768}
411
- {"model_name": "Qwen 1.5 32B Chat", "model_id": "qwen/qwen-32b-chat", "context_window": 32768}
412
- {"model_name": "Qwen 1.5 72B Chat", "model_id": "qwen/qwen-72b-chat", "context_window": 32768}
413
- {"model_name": "Qwen 1.5 110B Chat", "model_id": "qwen/qwen-110b-chat", "context_window": 32768}
414
- {"model_name": "NeverSleep: Llama 3 Lumimaid 8B", "model_id": "neversleep/llama-3-lumimaid-8b", "context_window": 24576}
415
- {"model_name": "Snowflake: Arctic Instruct", "model_id": "snowflake/snowflake-arctic-instruct", "context_window": 4096}
416
- {"model_name": "Fireworks: FireLLaVA 13B", "model_id": "fireworks/firellava-13b", "context_window": 4096}
417
- {"model_name": "Lynn: Llama 3 Soliloquy 8B v2", "model_id": "lynn/soliloquy-l3", "context_window": 24576}
418
- {"model_name": "Fimbulvetr 11B v2", "model_id": "sao10k/fimbulvetr-11b-v2", "context_window": 8192}
419
- {"model_name": "WizardLM-2 7B", "model_id": "microsoft/wizardlm-2-7b", "context_window": 32000}
420
- {"model_name": "Zephyr 141B-A35B", "model_id": "huggingfaceh4/zephyr-orpo-141b-a35b", "context_window": 65536}
421
- {"model_name": "Google: Gemini 1.5 Pro", "model_id": "google/gemini-pro-1.5", "context_window": 2000000}
422
- {"model_name": "Cohere: Command R+", "model_id": "cohere/command-r-plus", "context_window": 128000}
423
- {"model_name": "Databricks: DBRX 132B Instruct", "model_id": "databricks/dbrx-instruct", "context_window": 32768}
424
- {"model_name": "Midnight Rose 70B", "model_id": "sophosympatheia/midnight-rose-70b", "context_window": 4096}
425
- {"model_name": "Cohere: Command R", "model_id": "cohere/command-r", "context_window": 128000}
426
- {"model_name": "Cohere: Command", "model_id": "cohere/command", "context_window": 4096}
427
- {"model_name": "Anthropic: Claude 3 Sonnet", "model_id": "anthropic/claude-3-sonnet", "context_window": 200000}
428
- {"model_name": "Anthropic: Claude 3 Opus", "model_id": "anthropic/claude-3-opus", "context_window": 200000}
429
- {"model_name": "Google: Gemma 7B", "model_id": "google/gemma-7b-it", "context_window": 8192}
430
- {"model_name": "Nous: Hermes 2 Mistral 7B DPO", "model_id": "nousresearch/nous-hermes-2-mistral-7b-dpo", "context_window": 8192}
431
- {"model_name": "Meta: CodeLlama 70B Instruct", "model_id": "meta-llama/codellama-70b-instruct", "context_window": 2048}
432
- {"model_name": "RWKV v5: Eagle 7B", "model_id": "recursal/eagle-7b", "context_window": 10000}
433
- {"model_name": "Yi 34B 200K", "model_id": "01-ai/yi-34b-200k", "context_window": 200000}
434
- {"model_name": "Nous: Hermes 2 Mixtral 8x7B SFT", "model_id": "nousresearch/nous-hermes-2-mixtral-8x7b-sft", "context_window": 32768}
435
- {"model_name": "Nous: Hermes 2 Mixtral 8x7B DPO", "model_id": "nousresearch/nous-hermes-2-mixtral-8x7b-dpo", "context_window": 32768}
436
- {"model_name": "Mistral Small", "model_id": "mistralai/mistral-small", "context_window": 32000}
437
- {"model_name": "Mistral Medium", "model_id": "mistralai/mistral-medium", "context_window": 32000}
438
- {"model_name": "Noromaid Mixtral 8x7B Instruct", "model_id": "neversleep/noromaid-mixtral-8x7b-instruct", "context_window": 8000}
439
- {"model_name": "Nous: Hermes 2 Yi 34B", "model_id": "nousresearch/nous-hermes-yi-34b", "context_window": 4096}
440
- {"model_name": "Dolphin 2.6 Mixtral 8x7B \ud83d\udc2c", "model_id": "cognitivecomputations/dolphin-mixtral-8x7b", "context_window": 32768}
441
- {"model_name": "RWKV v5 3B AI Town", "model_id": "recursal/rwkv-5-3b-ai-town", "context_window": 10000}
442
- {"model_name": "RWKV v5 World 3B", "model_id": "rwkv/rwkv-5-world-3b", "context_window": 10000}
443
- {"model_name": "StripedHyena Nous 7B", "model_id": "togethercomputer/stripedhyena-nous-7b", "context_window": 32768}
444
- {"model_name": "Psyfighter v2 13B", "model_id": "koboldai/psyfighter-13b-2", "context_window": 4096}
445
- {"model_name": "Yi 34B Chat", "model_id": "01-ai/yi-34b-chat", "context_window": 4096}
446
- {"model_name": "Nous: Capybara 7B", "model_id": "nousresearch/nous-capybara-7b", "context_window": 8192}
447
- {"model_name": "Psyfighter 13B", "model_id": "jebcarter/psyfighter-13b", "context_window": 4096}
448
- {"model_name": "OpenChat 3.5 7B", "model_id": "openchat/openchat-7b", "context_window": 8192}
449
- {"model_name": "Neural Chat 7B v3.1", "model_id": "intel/neural-chat-7b", "context_window": 4096}
450
- {"model_name": "Anthropic: Claude Instant v1.1", "model_id": "anthropic/claude-instant-1.1", "context_window": 100000}
451
- {"model_name": "Anthropic: Claude v2", "model_id": "anthropic/claude-2", "context_window": 200000}
452
- {"model_name": "Anthropic: Claude v2.1", "model_id": "anthropic/claude-2.1", "context_window": 200000}
453
- {"model_name": "OpenHermes 2.5 Mistral 7B", "model_id": "teknium/openhermes-2.5-mistral-7b", "context_window": 4096}
454
- {"model_name": "LLaVA 13B", "model_id": "liuhaotian/llava-13b", "context_window": 2048}
455
- {"model_name": "Nous: Capybara 34B", "model_id": "nousresearch/nous-capybara-34b", "context_window": 200000}
456
- {"model_name": "lzlv 70B", "model_id": "lizpreciatior/lzlv-70b-fp16-hf", "context_window": 4096}
457
- {"model_name": "Toppy M 7B", "model_id": "undi95/toppy-m-7b", "context_window": 4096}
458
- {"model_name": "Auto Router", "model_id": "openrouter/auto", "context_window": 2000000}
459
- {"model_name": "Google: PaLM 2 Code Chat 32k", "model_id": "google/palm-2-codechat-bison-32k", "context_window": 32760}
460
- {"model_name": "Google: PaLM 2 Chat 32k", "model_id": "google/palm-2-chat-bison-32k", "context_window": 32760}
461
- {"model_name": "OpenHermes 2 Mistral 7B", "model_id": "teknium/openhermes-2-mistral-7b", "context_window": 8192}
462
- {"model_name": "Mistral OpenOrca 7B", "model_id": "open-orca/mistral-7b-openorca", "context_window": 8192}
463
- {"model_name": "Airoboros 70B", "model_id": "jondurbin/airoboros-l2-70b", "context_window": 4096}
464
- {"model_name": "Nous: Hermes 70B", "model_id": "nousresearch/nous-hermes-llama2-70b", "context_window": 4096}
465
- {"model_name": "Xwin 70B", "model_id": "xwin-lm/xwin-lm-70b", "context_window": 8192}
466
- {"model_name": "Synthia 70B", "model_id": "migtissera/synthia-70b", "context_window": 8192}
467
- {"model_name": "Pygmalion: Mythalion 13B", "model_id": "pygmalionai/mythalion-13b", "context_window": 8192}
468
- {"model_name": "Nous: Hermes 13B", "model_id": "nousresearch/nous-hermes-llama2-13b", "context_window": 4096}
469
- {"model_name": "Phind: CodeLlama 34B v2", "model_id": "phind/phind-codellama-34b", "context_window": 4096}
470
- {"model_name": "Meta: CodeLlama 34B Instruct", "model_id": "meta-llama/codellama-34b-instruct", "context_window": 8192}
471
- {"model_name": "Hugging Face: Zephyr 7B", "model_id": "huggingfaceh4/zephyr-7b-beta", "context_window": 4096}
472
- {"model_name": "Anthropic: Claude Instant v1.0", "model_id": "anthropic/claude-instant-1.0", "context_window": 100000}
473
- {"model_name": "Anthropic: Claude v1.2", "model_id": "anthropic/claude-1.2", "context_window": 100000}
474
- {"model_name": "Anthropic: Claude v1", "model_id": "anthropic/claude-1", "context_window": 100000}
475
- {"model_name": "Anthropic: Claude Instant v1", "model_id": "anthropic/claude-instant-1", "context_window": 100000}
476
- {"model_name": "Anthropic: Claude v2.0", "model_id": "anthropic/claude-2.0", "context_window": 100000}
477
- {"model_name": "Google: PaLM 2 Code Chat", "model_id": "google/palm-2-codechat-bison", "context_window": 7168}
478
- {"model_name": "Google: PaLM 2 Chat", "model_id": "google/palm-2-chat-bison", "context_window": 9216}
479
- {"model_name": "Meta: Llama 2 70B Chat", "model_id": "meta-llama/llama-2-70b-chat", "context_window": 4096}
480
- {"model_name": "Meta: Llama 2 13B Chat", "model_id": "meta-llama/llama-2-13b-chat", "context_window": 4096}
481
- {"model_name": "OpenAI: GPT-3.5 Turbo 0125", "model_id": "openai/gpt-3.5-turbo-0125", "context_window": 16385}