| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | prince-canuma | prince-canuma/Ministral-8B-Instruct-2410-HF | f98bc033-55c9-45c1-a101-3881507bb733 | 0.0.1 | hfopenllm_v2/prince-canuma_Ministral-8B-Instruct-2410-HF/1762652580.442474 | 1762652580.442475 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prince-canuma/Ministral-8B-Instruct-2410-HF | prince-canuma/Ministral-8B-Instruct-2410-HF | prince-canuma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5911636679565775}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.02} |
HF Open LLM v2 | mlx-community | mlx-community/Josiefied-Qwen2.5-0.5B-Instruct-abliterated-v1-float32 | 9bf2a7e3-e744-4ac0-853a-f5cec8ef9c57 | 0.0.1 | hfopenllm_v2/mlx-community_Josiefied-Qwen2.5-0.5B-Instruct-abliterated-v1-float32/1762652580.3704169 | 1762652580.3704178 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mlx-community/Josiefied-Qwen2.5-0.5B-Instruct-abliterated-v1-float32 | mlx-community/Josiefied-Qwen2.5-0.5B-Instruct-abliterated-v1-float32 | mlx-community | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3368983186833158}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | mlx-community | mlx-community/Mistral-Small-24B-Instruct-2501-bf16 | d769592a-faa3-4269-abac-373679f42c62 | 0.0.1 | hfopenllm_v2/mlx-community_Mistral-Small-24B-Instruct-2501-bf16/1762652580.3707452 | 1762652580.3707461 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mlx-community/Mistral-Small-24B-Instruct-2501-bf16 | mlx-community/Mistral-Small-24B-Instruct-2501-bf16 | mlx-community | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6282829558903709}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_8b-table | c19ed336-aadf-4af3-a0e5-1c1946a17ce4 | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-Iter1_bt_8b-table/1762652580.605978 | 1762652580.605979 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_8b-table | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_8b-table | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7332710541363582}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_2b-table-0.001 | b1ad6a57-8cad-4cca-8dd6-00ebd35089ab | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-score-Iter1_bt_2b-table-0.001/1762652580.606723 | 1762652580.606724 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_2b-table-0.001 | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_2b-table-0.001 | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6495653754260917}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table | d6cadac8-17a9-430f-94b3-6eb0c7ecc146 | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table/1762652580.60626 | 1762652580.606261 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_2b-table | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6784664689690023}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table | 0bdeac20-0505-459e-b417-ea4cb2f95cec | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table/1762652580.6064892 | 1762652580.6064901 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_gp_8b-table | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7131876753680235}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002 | aa12336f-556c-4222-a10c-529eb74a793b | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002/1762652580.607418 | 1762652580.6074188 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002 | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_8b-table-0.002 | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7015973173402128}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001 | 338737c7-29cf-44d8-be92-6749167b7c03 | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001/1762652580.6072068 | 1762652580.6072068 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001 | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_gp_2b-table-0.001 | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6504397221594258}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_8b-table-0.002 | 249af8cd-717b-4ee9-8ac7-740f16708675 | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-score-Iter1_bt_8b-table-0.002/1762652580.6069329 | 1762652580.606934 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_8b-table-0.002 | yfzp/Llama-3-8B-Instruct-SPPO-score-Iter1_bt_8b-table-0.002 | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7196073086078272}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | yfzp | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table | cd2f94a5-595a-469e-b34e-a5f9abb82e6b | 0.0.1 | hfopenllm_v2/yfzp_Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table/1762652580.605642 | 1762652580.605643 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table | yfzp/Llama-3-8B-Instruct-SPPO-Iter1_bt_2b-table | yfzp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6708976626462231}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | mistral | siqi00/Mistral-7B-DFT2 | dae2a1a6-a608-4b64-a77a-e4aed87e7d7f | 0.0.1 | hfopenllm_v2/siqi00_Mistral-7B-DFT2/1762652580.5173602 | 1762652580.517361 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | siqi00/Mistral-7B-DFT2 | siqi00/Mistral-7B-DFT2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5803723010501026}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | siqi00/Mistral-7B-DFT | e2f4255d-12ff-4c88-996d-bac6b51aaa33 | 0.0.1 | hfopenllm_v2/siqi00_Mistral-7B-DFT/1762652580.5171149 | 1762652580.5171149 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | siqi00/Mistral-7B-DFT | siqi00/Mistral-7B-DFT | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5568668909604294}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | nbeerbower/Lyra-Gutenberg-mistral-nemo-12B | 5b3de7db-009e-46c9-bf34-fe5912c39b81 | 0.0.1 | hfopenllm_v2/nbeerbower_Lyra-Gutenberg-mistral-nemo-12B/1762652580.3801112 | 1762652580.380112 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Lyra-Gutenberg-mistral-nemo-12B | nbeerbower/Lyra-Gutenberg-mistral-nemo-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34948824674086976}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/DoppelKartoffel-Mistral-Nemo-23B | 5db2ec95-d423-4987-aaa7-b5919d1a2cc8 | 0.0.1 | hfopenllm_v2/nbeerbower_DoppelKartoffel-Mistral-Nemo-23B/1762652580.376802 | 1762652580.3768032 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/DoppelKartoffel-Mistral-Nemo-23B | nbeerbower/DoppelKartoffel-Mistral-Nemo-23B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5191480826429429}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.153} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B | 012b188f-db69-4529-bfe3-db34c77e7dc0 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Nemo-Gutenberg-Doppel-12B/1762652580.381143 | 1762652580.381144 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3567068711020093}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutenberg-12B-v4 | 9f84023e-a23c-4d2c-afb3-f93629f97a6f | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutenberg-12B-v4/1762652580.3875241 | 1762652580.387525 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutenberg-12B-v4 | nbeerbower/mistral-nemo-gutenberg-12B-v4 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.237929804031082}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutenberg-12B-v3 | b4ed9f85-c1bb-4a52-8ba6-69f4e0f8e442 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutenberg-12B-v3/1762652580.387317 | 1762652580.3873181 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutenberg-12B-v3 | nbeerbower/mistral-nemo-gutenberg-12B-v3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21827085466562057}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-bophades3-12B | 2043110d-2b63-4133-9c53-b39b5b7869b6 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-bophades3-12B/1762652580.386282 | 1762652580.386283 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-bophades3-12B | nbeerbower/mistral-nemo-bophades3-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6577835698169745}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-bophades-12B | 1cb58f83-841d-474a-9c7b-adece8cab805 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-bophades-12B/1762652580.385997 | 1762652580.385998 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-bophades-12B | nbeerbower/mistral-nemo-bophades-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6794405510711579}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Mahou-1.5-mistral-nemo-12B-lorablated | 0cee26b2-c3b3-40be-bc15-3fdaf7b4b38c | 0.0.1 | hfopenllm_v2/nbeerbower_Mahou-1.5-mistral-nemo-12B-lorablated/1762652580.380727 | 1762652580.380728 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mahou-1.5-mistral-nemo-12B-lorablated | nbeerbower/Mahou-1.5-mistral-nemo-12B-lorablated | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6824880206740338}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutenberg-12B | 9f8c4246-9770-4790-8db0-095e722d89e9 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutenberg-12B/1762652580.3869052 | 1762652580.3869061 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutenberg-12B | nbeerbower/mistral-nemo-gutenberg-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.350386973231027}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/DoublePotato-Mistral-Nemo-13B | 03b30ba7-efc3-467e-bdde-c6a18437929b | 0.0.1 | hfopenllm_v2/nbeerbower_DoublePotato-Mistral-Nemo-13B/1762652580.377009 | 1762652580.3770099 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/DoublePotato-Mistral-Nemo-13B | nbeerbower/DoublePotato-Mistral-Nemo-13B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6796156420519777}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 13.338} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B-v2 | 178418ad-2d0a-40cd-a057-105bbe69f937 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Nemo-Gutenberg-Doppel-12B-v2/1762652580.3813472 | 1762652580.3813481 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B-v2 | nbeerbower/Mistral-Nemo-Gutenberg-Doppel-12B-v2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6535869271311232}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/BigKartoffel-mistral-nemo-20B | 95ba0175-5578-47fe-aec9-93ccf4f9f9af | 0.0.1 | hfopenllm_v2/nbeerbower_BigKartoffel-mistral-nemo-20B/1762652580.376553 | 1762652580.376553 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/BigKartoffel-mistral-nemo-20B | nbeerbower/BigKartoffel-mistral-nemo-20B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5857181168189294}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 20.427} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Small-Drummer-22B | 2e86d526-de04-4339-8495-e88c5a9f3f18 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Small-Drummer-22B/1762652580.3829079 | 1762652580.3829088 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Small-Drummer-22B | nbeerbower/Mistral-Small-Drummer-22B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6331289866443259}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 22.247} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Nemo-Prism-12B-v7 | d66604f0-15b3-4ac3-b0e9-083ab6906da0 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Nemo-Prism-12B-v7/1762652580.382694 | 1762652580.382695 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Nemo-Prism-12B-v7 | nbeerbower/Mistral-Nemo-Prism-12B-v7 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6961517662025647}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutenberg-12B-v2 | db2dee58-3a9c-4789-800d-ed7207c6699c | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutenberg-12B-v2/1762652580.38711 | 1762652580.387111 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutenberg-12B-v2 | nbeerbower/mistral-nemo-gutenberg-12B-v2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6203395878491292}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Nemo-Prism-12B-v2 | e5582319-d8e6-4223-97bb-a64a2cc03853 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Nemo-Prism-12B-v2/1762652580.3824818 | 1762652580.382483 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Nemo-Prism-12B-v2 | nbeerbower/Mistral-Nemo-Prism-12B-v2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6974006746543615}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Stella-mistral-nemo-12B-v2 | ed825fd6-f749-449f-a1d6-c3ad7a82e354 | 0.0.1 | hfopenllm_v2/nbeerbower_Stella-mistral-nemo-12B-v2/1762652580.384186 | 1762652580.384186 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Stella-mistral-nemo-12B-v2 | nbeerbower/Stella-mistral-nemo-12B-v2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32743121584063617}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-narwhal-12B | e1bd9218-4bfb-4df1-a2bf-4a10937240dc | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-narwhal-12B/1762652580.388214 | 1762652580.388215 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-narwhal-12B | nbeerbower/mistral-nemo-narwhal-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5549187267561182}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT | c3eae55f-ce07-4ea2-b9d4-92e0909a8b06 | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Gutenberg-Doppel-7B-FFT/1762652580.380932 | 1762652580.380933 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT | nbeerbower/Mistral-Gutenberg-Doppel-7B-FFT | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5716798095719358}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Small-Gutenberg-Doppel-22B | 99cfc94d-3cde-4e42-924a-5c4a4c7f217a | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Small-Gutenberg-Doppel-22B/1762652580.383116 | 1762652580.383116 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Small-Gutenberg-Doppel-22B | nbeerbower/Mistral-Small-Gutenberg-Doppel-22B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48932277468228746}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 22.247} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutades-12B | b83d5033-b513-4472-84c1-1b757c533137 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutades-12B/1762652580.3867059 | 1762652580.3867059 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutades-12B | nbeerbower/mistral-nemo-gutades-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3425189608017837}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Flammades-Mistral-Nemo-12B | a6e65aeb-f0d3-48ca-8f6e-933d0ea2113b | 0.0.1 | hfopenllm_v2/nbeerbower_Flammades-Mistral-Nemo-12B/1762652580.3785129 | 1762652580.3785138 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Flammades-Mistral-Nemo-12B | nbeerbower/Flammades-Mistral-Nemo-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38415958545548035}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-kartoffel-12B | b111507d-92e8-4af1-882a-9434d6825f51 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-kartoffel-12B/1762652580.3880079 | 1762652580.3880079 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-kartoffel-12B | nbeerbower/mistral-nemo-kartoffel-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7031709198260616}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Hermes2-Gutenberg2-Mistral-7B | b9b08e55-0c5d-427d-914b-e4cfb4de96b8 | 0.0.1 | hfopenllm_v2/nbeerbower_Hermes2-Gutenberg2-Mistral-7B/1762652580.379175 | 1762652580.379176 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Hermes2-Gutenberg2-Mistral-7B | nbeerbower/Hermes2-Gutenberg2-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37214479802479644}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-gutenberg2-12B-test | 10a4d2dc-4779-4b0f-92fa-010a6a51fe9f | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-gutenberg2-12B-test/1762652580.387729 | 1762652580.38773 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-gutenberg2-12B-test | nbeerbower/mistral-nemo-gutenberg2-12B-test | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.33847192116916447}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Gutensuppe-mistral-nemo-12B | 80a9277b-5768-4da0-96c6-3289a7b8a9bc | 0.0.1 | hfopenllm_v2/nbeerbower_Gutensuppe-mistral-nemo-12B/1762652580.378963 | 1762652580.378964 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Gutensuppe-mistral-nemo-12B | nbeerbower/Gutensuppe-mistral-nemo-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.29161070404305023}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/mistral-nemo-cc-12B | 45e38c7d-5f31-404b-8fcc-9f3cad239cd1 | 0.0.1 | hfopenllm_v2/nbeerbower_mistral-nemo-cc-12B/1762652580.386496 | 1762652580.386497 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/mistral-nemo-cc-12B | nbeerbower/mistral-nemo-cc-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14353249378316202}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nbeerbower/Mistral-Nemo-Prism-12B | 5ea20d83-ceee-4c52-911a-e25e9cfecf0e | 0.0.1 | hfopenllm_v2/nbeerbower_Mistral-Nemo-Prism-12B/1762652580.382256 | 1762652580.382257 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nbeerbower/Mistral-Nemo-Prism-12B | nbeerbower/Mistral-Nemo-Prism-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6858103166265509}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nazimali/Mistral-Nemo-Kurdish-Instruct | 3381e897-35f3-45f4-ac05-3ca47441b772 | 0.0.1 | hfopenllm_v2/nazimali_Mistral-Nemo-Kurdish-Instruct/1762652580.376105 | 1762652580.376106 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nazimali/Mistral-Nemo-Kurdish-Instruct | nazimali/Mistral-Nemo-Kurdish-Instruct | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4963917959901949}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nazimali/Mistral-Nemo-Kurdish-Instruct | 27e58a27-f4e9-4c7a-93f2-c3b15cab8f9f | 0.0.1 | hfopenllm_v2/nazimali_Mistral-Nemo-Kurdish-Instruct/1762652580.376322 | 1762652580.376323 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nazimali/Mistral-Nemo-Kurdish-Instruct | nazimali/Mistral-Nemo-Kurdish-Instruct | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4860004787297703}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | nazimali/Mistral-Nemo-Kurdish | 0da50308-a631-4466-b2e4-2793412b31db | 0.0.1 | hfopenllm_v2/nazimali_Mistral-Nemo-Kurdish/1762652580.375733 | 1762652580.3757372 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nazimali/Mistral-Nemo-Kurdish | nazimali/Mistral-Nemo-Kurdish | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3401208792670115}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | pszemraj/Mistral-v0.3-6B | 729b4f81-32da-41d2-8fa4-d18553b37b83 | 0.0.1 | hfopenllm_v2/pszemraj_Mistral-v0.3-6B/1762652580.481565 | 1762652580.481566 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | pszemraj/Mistral-v0.3-6B | pszemraj/Mistral-v0.3-6B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2453744952282167}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 5.939} |
HF Open LLM v2 | mistral | Corianas/Neural-Mistral-7B | 4fb7a806-1176-474e-a039-b388f050cd45 | 0.0.1 | hfopenllm_v2/Corianas_Neural-Mistral-7B/1762652579.511706 | 1762652579.5117068 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Corianas/Neural-Mistral-7B | Corianas/Neural-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5489235229191878}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | Dans-DiscountModels/Mistral-7b-v0.3-Test-E0.7 | 393f8623-7f38-4aaa-a460-cbdcb74c2d04 | 0.0.1 | hfopenllm_v2/Dans-DiscountModels_Mistral-7b-v0.3-Test-E0.7/1762652579.536513 | 1762652579.536514 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Dans-DiscountModels/Mistral-7b-v0.3-Test-E0.7 | Dans-DiscountModels/Mistral-7b-v0.3-Test-E0.7 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5123538876846767}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | Dans-DiscountModels/mistral-7b-test-merged | 5ba7e296-cdd3-40e8-b56f-cc44ef0c3dcb | 0.0.1 | hfopenllm_v2/Dans-DiscountModels_mistral-7b-test-merged/1762652579.536763 | 1762652579.536763 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Dans-DiscountModels/mistral-7b-test-merged | Dans-DiscountModels/mistral-7b-test-merged | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6678003253589365}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | amazon/MegaBeam-Mistral-7B-300k | 4729a245-9e2d-4f65-bf14-67db4bb2590f | 0.0.1 | hfopenllm_v2/amazon_MegaBeam-Mistral-7B-300k/1762652580.010282 | 1762652580.010283 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | amazon/MegaBeam-Mistral-7B-300k | amazon/MegaBeam-Mistral-7B-300k | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.520347123410329}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | PranavHarshan/LaMistral-V4 | 21944667-04e0-46dc-9896-eef32c26fa6b | 0.0.1 | hfopenllm_v2/PranavHarshan_LaMistral-V4/1762652579.8148758 | 1762652579.814877 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | PranavHarshan/LaMistral-V4 | PranavHarshan/LaMistral-V4 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.623861354539289}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | mistral | Marsouuu/MistralBase-4x7B-MoE-ECE-PRYMMAL-Martial | 5cd26359-d15a-4d0b-92f1-c31101e7b993 | 0.0.1 | hfopenllm_v2/Marsouuu_MistralBase-4x7B-MoE-ECE-PRYMMAL-Martial/1762652579.7477188 | 1762652579.74772 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Marsouuu/MistralBase-4x7B-MoE-ECE-PRYMMAL-Martial | Marsouuu/MistralBase-4x7B-MoE-ECE-PRYMMAL-Martial | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16973629968483622}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 24.16} |
HF Open LLM v2 | mistral | tianyil1/MistralForCausalLM_Cal_DPO | 9902ef50-5208-4053-bb90-e08c98211b3f | 0.0.1 | hfopenllm_v2/tianyil1_MistralForCausalLM_Cal_DPO/1762652580.566411 | 1762652580.566412 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tianyil1/MistralForCausalLM_Cal_DPO | tianyil1/MistralForCausalLM_Cal_DPO | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5327619604870633}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | migtissera/Tess-3-Mistral-Nemo-12B | 7ef5c287-cf98-429f-80c3-d71743612a73 | 0.0.1 | hfopenllm_v2/migtissera_Tess-3-Mistral-Nemo-12B/1762652580.358769 | 1762652580.35877 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | migtissera/Tess-3-Mistral-Nemo-12B | migtissera/Tess-3-Mistral-Nemo-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.335499807178287}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | TTTXXX01/Mistral-7B-Base-SimPO2-5e-7 | 062d38c7-07e6-4f71-a7a3-e40a187b6f77 | 0.0.1 | hfopenllm_v2/TTTXXX01_Mistral-7B-Base-SimPO2-5e-7/1762652579.911438 | 1762652579.9114392 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TTTXXX01/Mistral-7B-Base-SimPO2-5e-7 | TTTXXX01/Mistral-7B-Base-SimPO2-5e-7 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43918912928806675}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | awnr/Mistral-7B-v0.1-signtensors-1-over-2 | 3bccbf0f-e578-426d-93bc-84364f7d8017 | 0.0.1 | hfopenllm_v2/awnr_Mistral-7B-v0.1-signtensors-1-over-2/1762652580.020659 | 1762652580.020659 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | awnr/Mistral-7B-v0.1-signtensors-1-over-2 | awnr/Mistral-7B-v0.1-signtensors-1-over-2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21792178087474567}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | awnr/Mistral-7B-v0.1-signtensors-7-over-16 | 893da954-ca56-42ab-914d-44fbc4a6f1ff | 0.0.1 | hfopenllm_v2/awnr_Mistral-7B-v0.1-signtensors-7-over-16/1762652580.0215192 | 1762652580.02152 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | awnr/Mistral-7B-v0.1-signtensors-7-over-16 | awnr/Mistral-7B-v0.1-signtensors-7-over-16 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22936253584932426}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | awnr/Mistral-7B-v0.1-signtensors-1-over-4 | ac1010e3-b3d8-4b61-ba79-0dcedb68619d | 0.0.1 | hfopenllm_v2/awnr_Mistral-7B-v0.1-signtensors-1-over-4/1762652580.0209029 | 1762652580.0209038 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | awnr/Mistral-7B-v0.1-signtensors-1-over-4 | awnr/Mistral-7B-v0.1-signtensors-1-over-4 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2133007087860211}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | awnr/Mistral-7B-v0.1-signtensors-5-over-16 | b0ae93c7-b251-42df-a67f-ca8b8a865937 | 0.0.1 | hfopenllm_v2/awnr_Mistral-7B-v0.1-signtensors-5-over-16/1762652580.021311 | 1762652580.021312 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | awnr/Mistral-7B-v0.1-signtensors-5-over-16 | awnr/Mistral-7B-v0.1-signtensors-5-over-16 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21182684166899385}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | awnr/Mistral-7B-v0.1-signtensors-3-over-8 | 12f4db59-10fe-47d0-86df-343ea8978249 | 0.0.1 | hfopenllm_v2/awnr_Mistral-7B-v0.1-signtensors-3-over-8/1762652580.02111 | 1762652580.021111 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | awnr/Mistral-7B-v0.1-signtensors-3-over-8 | awnr/Mistral-7B-v0.1-signtensors-3-over-8 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23942915907569692}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | allura-org/Mistral-Small-Sisyphus-24b-2503 | ce2ee38f-cb48-403f-894d-f2824d00a388 | 0.0.1 | hfopenllm_v2/allura-org_Mistral-Small-Sisyphus-24b-2503/1762652580.007755 | 1762652580.007756 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allura-org/Mistral-Small-Sisyphus-24b-2503 | allura-org/Mistral-Small-Sisyphus-24b-2503 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6848362345243952}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | allura-org/Mistral-Small-24b-Sertraline-0304 | 34f35618-3ecf-4704-ab7a-ec9e8a5d08c1 | 0.0.1 | hfopenllm_v2/allura-org_Mistral-Small-24b-Sertraline-0304/1762652580.007422 | 1762652580.007423 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allura-org/Mistral-Small-24b-Sertraline-0304 | allura-org/Mistral-Small-24b-Sertraline-0304 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6799902037704402}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | DreadPoor/felix_dies-mistral-7B-model_stock | 0444a153-1852-4a0d-959e-750c933777bd | 0.0.1 | hfopenllm_v2/DreadPoor_felix_dies-mistral-7B-model_stock/1762652579.5887182 | 1762652579.5887191 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DreadPoor/felix_dies-mistral-7B-model_stock | DreadPoor/felix_dies-mistral-7B-model_stock | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30077860077926566}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | Open-Orca/Mistral-7B-OpenOrca | c6e0aa8c-8765-4e2f-a6b2-cdeb885d29a4 | 0.0.1 | hfopenllm_v2/Open-Orca_Mistral-7B-OpenOrca/1762652579.799384 | 1762652579.799385 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Open-Orca/Mistral-7B-OpenOrca | Open-Orca/Mistral-7B-OpenOrca | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4977659277384008}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | Triangle104/Mistral-Redemption-Arc | 189f08b4-7e58-4820-9ff7-bcea4530e3dd | 0.0.1 | hfopenllm_v2/Triangle104_Mistral-Redemption-Arc/1762652579.929934 | 1762652579.9299352 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Mistral-Redemption-Arc | Triangle104/Mistral-Redemption-Arc | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.40289432040319684}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | Triangle104/Mistral-Small-24b-Harmony | e8d645e6-8ec4-4c0c-8cf2-8aa7e126e1f1 | 0.0.1 | hfopenllm_v2/Triangle104_Mistral-Small-24b-Harmony/1762652579.930191 | 1762652579.9301918 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Mistral-Small-24b-Harmony | Triangle104/Mistral-Small-24b-Harmony | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16871234989826994}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | llmat/Mistral-v0.3-7B-ORPO | 04a1b79b-a5af-420d-829b-0750341490cf | 0.0.1 | hfopenllm_v2/llmat_Mistral-v0.3-7B-ORPO/1762652580.325205 | 1762652580.325206 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | llmat/Mistral-v0.3-7B-ORPO | llmat/Mistral-v0.3-7B-ORPO | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3639764713183243}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.248} |
HF Open LLM v2 | mistral | llmat/Mistral-v0.3-7B-ORPO | ff710b55-0a89-4582-8caa-867efb88cf98 | 0.0.1 | hfopenllm_v2/llmat_Mistral-v0.3-7B-ORPO/1762652580.324949 | 1762652580.324949 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | llmat/Mistral-v0.3-7B-ORPO | llmat/Mistral-v0.3-7B-ORPO | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3770406964631622}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.248} |
HF Open LLM v2 | mistral | Unbabel/TowerInstruct-Mistral-7B-v0.2 | cc6d8d11-2273-41fa-95eb-5d1f7d4a2311 | 0.0.1 | hfopenllm_v2/Unbabel_TowerInstruct-Mistral-7B-v0.2/1762652579.938655 | 1762652579.938656 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Unbabel/TowerInstruct-Mistral-7B-v0.2 | Unbabel/TowerInstruct-Mistral-7B-v0.2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2843422119975}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH", ... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | FuJhen/mistral_7b_v0.1_structedData_viggo | 3008b476-f005-4672-a953-c86b29ba3ef2 | 0.0.1 | hfopenllm_v2/FuJhen_mistral_7b_v0.1_structedData_viggo/1762652579.625654 | 1762652579.625655 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | FuJhen/mistral_7b_v0.1_structedData_viggo | FuJhen/mistral_7b_v0.1_structedData_viggo | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17832905579418165}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 14.483} |
HF Open LLM v2 | mistral | FuJhen/mistral_7b_v0.1_structedData_e2e | 3ba2b06b-b44a-4ad6-bf38-f1602995c2f9 | 0.0.1 | hfopenllm_v2/FuJhen_mistral_7b_v0.1_structedData_e2e/1762652579.625389 | 1762652579.62539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | FuJhen/mistral_7b_v0.1_structedData_e2e | FuJhen/mistral_7b_v0.1_structedData_e2e | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17268403391889076}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 7.0} |
HF Open LLM v2 | mistral | tensopolis/mistral-small-r1-tensopolis | b2ee17e1-3d66-4622-8ea9-3bf8747371a5 | 0.0.1 | hfopenllm_v2/tensopolis_mistral-small-r1-tensopolis/1762652580.556001 | 1762652580.5560021 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/mistral-small-r1-tensopolis | tensopolis/mistral-small-r1-tensopolis | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.462220242290456}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | tensopolis/mistral-small-2501-tensopolis-v1 | 53ec68aa-e4fc-430f-8ccf-f5886f1b9d4b | 0.0.1 | hfopenllm_v2/tensopolis_mistral-small-2501-tensopolis-v1/1762652580.555758 | 1762652580.555758 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/mistral-small-2501-tensopolis-v1 | tensopolis/mistral-small-2501-tensopolis-v1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7762104549262623}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | NousResearch/Hermes-2-Pro-Mistral-7B | b8d954d0-a820-4927-a7f8-b0083cf9db9c | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-2-Pro-Mistral-7B/1762652579.790145 | 1762652579.790146 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-2-Pro-Mistral-7B | NousResearch/Hermes-2-Pro-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5668337788179807}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | NousResearch/Yarn-Mistral-7b-64k | c7fcd944-78ab-422d-b0ef-8dc394266473 | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Mistral-7b-64k/1762652579.7932239 | 1762652579.793225 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Mistral-7b-64k | NousResearch/Yarn-Mistral-7b-64k | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2079548930171944}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | NousResearch/Yarn-Mistral-7b-128k | c6411eb6-8304-49e6-ac7b-5300deb27c55 | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Mistral-7b-128k/1762652579.793008 | 1762652579.7930088 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Mistral-7b-128k | NousResearch/Yarn-Mistral-7b-128k | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19336693307091848}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | NousResearch/DeepHermes-3-Mistral-24B-Preview | b1f439ee-711a-41b8-b63d-dd28cb63266e | 0.0.1 | hfopenllm_v2/NousResearch_DeepHermes-3-Mistral-24B-Preview/1762652579.78962 | 1762652579.7896209 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/DeepHermes-3-Mistral-24B-Preview | NousResearch/DeepHermes-3-Mistral-24B-Preview | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45357761849669986}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | vicgalle/Merge-Mistral-Prometheus-7B | ecfdb6a4-36d7-4252-9677-10655b3855e5 | 0.0.1 | hfopenllm_v2/vicgalle_Merge-Mistral-Prometheus-7B/1762652580.5881548 | 1762652580.5881548 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | vicgalle/Merge-Mistral-Prometheus-7B | vicgalle/Merge-Mistral-Prometheus-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48480143796238423}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | yam-peleg/Hebrew-Mistral-7B-200K | 83a71a32-796a-4fec-9513-2f4b5e032749 | 0.0.1 | hfopenllm_v2/yam-peleg_Hebrew-Mistral-7B-200K/1762652580.6036632 | 1762652580.603664 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yam-peleg/Hebrew-Mistral-7B-200K | yam-peleg/Hebrew-Mistral-7B-200K | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1855731680829089}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.504} |
HF Open LLM v2 | mistral | yam-peleg/Hebrew-Mistral-7B-200K | 4d45347d-4491-4d7b-9abe-02c42974f520 | 0.0.1 | hfopenllm_v2/yam-peleg_Hebrew-Mistral-7B-200K/1762652580.6038961 | 1762652580.603897 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yam-peleg/Hebrew-Mistral-7B-200K | yam-peleg/Hebrew-Mistral-7B-200K | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17698041197356346}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.504} |
HF Open LLM v2 | mistral | yam-peleg/Hebrew-Mistral-7B | 99c28dc3-f614-430a-99d7-31c2218c4d7f | 0.0.1 | hfopenllm_v2/yam-peleg_Hebrew-Mistral-7B/1762652580.603384 | 1762652580.603385 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yam-peleg/Hebrew-Mistral-7B | yam-peleg/Hebrew-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23283443485507344}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.504} |
HF Open LLM v2 | mistral | mistral-community/mixtral-8x22B-v0.3 | abeddace-67d6-484a-b410-95d92819dfe5 | 0.0.1 | hfopenllm_v2/mistral-community_mixtral-8x22B-v0.3/1762652580.361342 | 1762652580.361343 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistral-community/mixtral-8x22B-v0.3 | mistral-community/mixtral-8x22B-v0.3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25826362939223485}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 140.63} |
HF Open LLM v2 | mistral | mistral-community/Mistral-7B-v0.2 | a65136c6-b3d7-4107-8d3a-0ce84b77965b | 0.0.1 | hfopenllm_v2/mistral-community_Mistral-7B-v0.2/1762652580.360901 | 1762652580.3609018 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistral-community/Mistral-7B-v0.2 | mistral-community/Mistral-7B-v0.2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22663976028050017}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | mistral-community/Mixtral-8x22B-v0.1 | 810fc203-f10a-49ad-8a6f-58cbd70f2205 | 0.0.1 | hfopenllm_v2/mistral-community_Mixtral-8x22B-v0.1/1762652580.361141 | 1762652580.361141 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistral-community/Mixtral-8x22B-v0.1 | mistral-community/Mixtral-8x22B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3166564417177914}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Unknown", "params_billions": 0.0} |
HF Open LLM v2 | mistral | Locutusque/TinyMistral-248M-v2.5 | 9a3f7863-0041-4473-b3f0-ad25f0d9310f | 0.0.1 | hfopenllm_v2/Locutusque_TinyMistral-248M-v2.5/1762652579.73623 | 1762652579.7362418 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Locutusque/TinyMistral-248M-v2.5 | Locutusque/TinyMistral-248M-v2.5 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1336409615376091}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 0.248} |
HF Open LLM v2 | mistral | shivank21/mistral_dpo_self | 7b07e583-36df-47df-8439-224eca2e5761 | 0.0.1 | hfopenllm_v2/shivank21_mistral_dpo_self/1762652580.5158348 | 1762652580.515836 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | shivank21/mistral_dpo_self | shivank21/mistral_dpo_self | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.340345837932242}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "", "params_billions": 7.913} |
HF Open LLM v2 | mistral | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter1 | 01c4d932-bdcf-4840-83cb-e441585d70e2 | 0.0.1 | hfopenllm_v2/UCLA-AGI_Mistral7B-PairRM-SPPO-Iter1/1762652579.9377868 | 1762652579.937788 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter1 | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5047352136774869}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | UCLA-AGI/Mistral7B-PairRM-SPPO | 01613adc-1206-4695-ae19-31f2b7ee0d9d | 0.0.1 | hfopenllm_v2/UCLA-AGI_Mistral7B-PairRM-SPPO/1762652579.93755 | 1762652579.93755 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | UCLA-AGI/Mistral7B-PairRM-SPPO | UCLA-AGI/Mistral7B-PairRM-SPPO | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43549227161708715}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter3 | 66cc8076-71be-43fc-9efb-edd8ad19a6b6 | 0.0.1 | hfopenllm_v2/UCLA-AGI_Mistral7B-PairRM-SPPO-Iter3/1762652579.938179 | 1762652579.9381802 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter3 | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4350678422142138}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2 | b0e6d5e1-3f41-4dfc-8845-b6d028820816 | 0.0.1 | hfopenllm_v2/UCLA-AGI_Mistral7B-PairRM-SPPO-Iter2/1762652579.937983 | 1762652579.937984 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2 | UCLA-AGI/Mistral7B-PairRM-SPPO-Iter2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4445848127413041}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | hotmailuser/Mistral-modelstock-24B | 58269430-efba-4d04-a69e-8ef666f2afee | 0.0.1 | hfopenllm_v2/hotmailuser_Mistral-modelstock-24B/1762652580.195392 | 1762652580.195392 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | hotmailuser/Mistral-modelstock-24B | hotmailuser/Mistral-modelstock-24B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3424192254329623}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | hotmailuser/Mistral-modelstock2-24B | 7c9aa35b-3d8e-4b3f-8ae7-35698a1f1c70 | 0.0.1 | hfopenllm_v2/hotmailuser_Mistral-modelstock2-24B/1762652580.195659 | 1762652580.19566 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | hotmailuser/Mistral-modelstock2-24B | hotmailuser/Mistral-modelstock2-24B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43184528163051816}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | bamec66557/Mistral-Nemo-VICIOUS_MESH-12B-2407 | 9cd84a08-1f21-42ad-b8c0-eeb2df93ee29 | 0.0.1 | hfopenllm_v2/bamec66557_Mistral-Nemo-VICIOUS_MESH-12B-2407/1762652580.026026 | 1762652580.026027 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bamec66557/Mistral-Nemo-VICIOUS_MESH-12B-2407 | bamec66557/Mistral-Nemo-VICIOUS_MESH-12B-2407 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6705729686121713}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | allknowingroger/Mistralmash1-7B-s | c5e7d08d-4430-43f6-a293-5381b2f13ca6 | 0.0.1 | hfopenllm_v2/allknowingroger_Mistralmash1-7B-s/1762652579.990727 | 1762652579.990727 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allknowingroger/Mistralmash1-7B-s | allknowingroger/Mistralmash1-7B-s | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39610012544493056}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | allknowingroger/Mistralmash2-7B-s | 7a9d4b20-e704-4f50-a09b-ccb67d417824 | 0.0.1 | hfopenllm_v2/allknowingroger_Mistralmash2-7B-s/1762652579.991016 | 1762652579.9910169 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allknowingroger/Mistralmash2-7B-s | allknowingroger/Mistralmash2-7B-s | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4101883003763348}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | uukuguy/speechless-code-mistral-7b-v1.0 | cebdb6d6-a12c-47f6-b912-4b8e98763c48 | 0.0.1 | hfopenllm_v2/uukuguy_speechless-code-mistral-7b-v1.0/1762652580.581523 | 1762652580.581524 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | uukuguy/speechless-code-mistral-7b-v1.0 | uukuguy/speechless-code-mistral-7b-v1.0 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36652415590632853}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | irahulpandey/mistralai-7B-slerp-v0.1 | 034c23f5-6c03-4cee-b6b2-7263426cf975 | 0.0.1 | hfopenllm_v2/irahulpandey_mistralai-7B-slerp-v0.1/1762652580.23053 | 1762652580.230531 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | irahulpandey/mistralai-7B-slerp-v0.1 | irahulpandey/mistralai-7B-slerp-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4966167546554254}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | teknium/CollectiveCognition-v1.1-Mistral-7B | 626bfec9-65d1-4250-8d07-d9c8a008b554 | 0.0.1 | hfopenllm_v2/teknium_CollectiveCognition-v1.1-Mistral-7B/1762652580.55394 | 1762652580.553941 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | teknium/CollectiveCognition-v1.1-Mistral-7B | teknium/CollectiveCognition-v1.1-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27904626391308396}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | teknium/OpenHermes-2.5-Mistral-7B | 66d1a6cf-41da-4226-a06c-fc99641e754a | 0.0.1 | hfopenllm_v2/teknium_OpenHermes-2.5-Mistral-7B/1762652580.554678 | 1762652580.5546792 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | teknium/OpenHermes-2.5-Mistral-7B | teknium/OpenHermes-2.5-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5571417173100706}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | teknium/OpenHermes-2-Mistral-7B | f24b2adb-f12d-4dd8-984b-8ab43e15720f | 0.0.1 | hfopenllm_v2/teknium_OpenHermes-2-Mistral-7B/1762652580.5544581 | 1762652580.5544589 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | teknium/OpenHermes-2-Mistral-7B | teknium/OpenHermes-2-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5286151854856226}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | mistral | aws-prototyping/MegaBeam-Mistral-7B-512k | f05d6512-16ca-4f44-a31f-392f8f71da74 | 0.0.1 | hfopenllm_v2/aws-prototyping_MegaBeam-Mistral-7B-512k/1762652580.0217311 | 1762652580.0217311 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | aws-prototyping/MegaBeam-Mistral-7B-512k | aws-prototyping/MegaBeam-Mistral-7B-512k | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5972586071623293}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
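Each row above stores its per-benchmark results as a JSON string in the `evaluation_results` column (truncated to the IFEval entry in this preview). Below is a minimal sketch, in Python, of how one might decode that field and index scores by benchmark name; the sample payload is copied from the IFEval entry of the allura-org/Mistral-Small-Sisyphus-24b-2503 row, and the full column is assumed to be a valid JSON list matching the schema shown in these rows.

```python
import json

# Sample evaluation_results payload, copied verbatim from the
# allura-org/Mistral-Small-Sisyphus-24b-2503 row above. The real column
# is assumed to contain further entries (BBH, etc.) in the same shape.
raw = """[
  {"evaluation_name": "IFEval",
   "metric_config": {"evaluation_description": "Accuracy on IFEval",
                     "lower_is_better": false,
                     "score_type": "continuous",
                     "min_score": 0, "max_score": 1},
   "score_details": {"score": 0.6848362345243952}}
]"""

# Decode the JSON list and index scores by benchmark name for quick lookup.
evaluation_results = json.loads(raw)
scores = {entry["evaluation_name"]: entry["score_details"]["score"]
          for entry in evaluation_results}

print(scores["IFEval"])  # 0.6848362345243952
```

The same lookup works for any benchmark present in the list, and each entry's `metric_config` carries the score bounds and direction (`lower_is_better`) needed to normalize or compare values across benchmarks.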