_leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | mistral | axolotl-ai-co/romulus-mistral-nemo-12b-simpo | 3f48c9eb-dbfa-4035-96a6-d4f516fa1e80 | 0.0.1 | hfopenllm_v2/axolotl-ai-co_romulus-mistral-nemo-12b-simpo/1762652580.021987 | 1762652580.0219882 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | axolotl-ai-co/romulus-mistral-nemo-12b-simpo | axolotl-ai-co/romulus-mistral-nemo-12b-simpo | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.607924750772395}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | theprint/Conversely-Mistral-7B | 5adde1ed-2d8f-4aa6-96f9-042df5358747 | 0.0.1 | hfopenllm_v2/theprint_Conversely-Mistral-7B/1762652580.56185 | 1762652580.5618508 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | theprint/Conversely-Mistral-7B | theprint/Conversely-Mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2608113139802391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "?", "params_billions": 14.496} |
HF Open LLM v2 | mistral | chujiezheng/Mistral7B-PairRM-SPPO-ExPO | d7e88fea-5c3d-4b9c-85a9-a0cf35a97ea0 | 0.0.1 | hfopenllm_v2/chujiezheng_Mistral7B-PairRM-SPPO-ExPO/1762652580.101214 | 1762652580.101215 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | chujiezheng/Mistral7B-PairRM-SPPO-ExPO | chujiezheng/Mistral7B-PairRM-SPPO-ExPO | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36734863495525205}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | spmurrayzzz/Mistral-Syndicate-7B | 80934f3c-8d0b-49be-9f42-e187b4729cff | 0.0.1 | hfopenllm_v2/spmurrayzzz_Mistral-Syndicate-7B/1762652580.534304 | 1762652580.534305 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | spmurrayzzz/Mistral-Syndicate-7B | spmurrayzzz/Mistral-Syndicate-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.249595517670891}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | TencentARC/Mistral_Pro_8B_v0.1 | 07ac72af-fa7e-4fe2-8a67-e893edbbd206 | 0.0.1 | hfopenllm_v2/TencentARC_Mistral_Pro_8B_v0.1/1762652579.913616 | 1762652579.913617 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TencentARC/Mistral_Pro_8B_v0.1 | TencentARC/Mistral_Pro_8B_v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21145227995053123}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.987} |
HF Open LLM v2 | mistral | TencentARC/MetaMath-Mistral-Pro | c2274449-ebc7-4e53-94bf-82e1f6810f6b | 0.0.1 | hfopenllm_v2/TencentARC_MetaMath-Mistral-Pro/1762652579.913366 | 1762652579.913366 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TencentARC/MetaMath-Mistral-Pro | TencentARC/MetaMath-Mistral-Pro | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21187670935340452}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.987} |
HF Open LLM v2 | mistral | flammenai/Mahou-1.2a-mistral-7B | d9804b0c-37db-492f-a1ba-851137e697f0 | 0.0.1 | hfopenllm_v2/flammenai_Mahou-1.2a-mistral-7B/1762652580.155141 | 1762652580.155141 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | flammenai/Mahou-1.2a-mistral-7B | flammenai/Mahou-1.2a-mistral-7B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4552010886669592}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | flammenai/Mahou-1.5-mistral-nemo-12B | 1c4e9e6a-7bb8-410f-9a3b-f88ea0ed474c | 0.0.1 | hfopenllm_v2/flammenai_Mahou-1.5-mistral-nemo-12B/1762652580.155725 | 1762652580.1557262 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | flammenai/Mahou-1.5-mistral-nemo-12B | flammenai/Mahou-1.5-mistral-nemo-12B | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6751441730164851}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistral | EpistemeAI2/Fireball-MathMistral-Nemo-Base-2407-v2dpo | b798f31f-5fab-4f21-8689-fe832afb873b | 0.0.1 | hfopenllm_v2/EpistemeAI2_Fireball-MathMistral-Nemo-Base-2407-v2dpo/1762652579.612103 | 1762652579.612104 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Fireball-MathMistral-Nemo-Base-2407-v2dpo | EpistemeAI2/Fireball-MathMistral-Nemo-Base-2407-v2dpo | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30972043067948596}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 11.58} |
HF Open LLM v2 | mistral | cckm/tinymistral_950m | d0dbcd95-252f-46e0-9699-81b293cb7db5 | 0.0.1 | hfopenllm_v2/cckm_tinymistral_950m/1762652580.099487 | 1762652580.099488 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cckm/tinymistral_950m | cckm/tinymistral_950m | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23952889444451833}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 0.955} |
HF Open LLM v2 | mistral | xinchen9/Mistral-7B-CoT | 6c54d5e2-7fca-4fa3-9d04-0f44d0651018 | 0.0.1 | hfopenllm_v2/xinchen9_Mistral-7B-CoT/1762652580.5978932 | 1762652580.597894 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | xinchen9/Mistral-7B-CoT | xinchen9/Mistral-7B-CoT | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2783470081605695}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | nlpguy/Mistral-NeMo-Minitron-Upscale-v1 | 97b61e29-2157-4167-b5bd-94919ecdcacc | 0.0.1 | hfopenllm_v2/nlpguy_Mistral-NeMo-Minitron-Upscale-v1/1762652580.4083898 | 1762652580.408391 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Mistral-NeMo-Minitron-Upscale-v1 | nlpguy/Mistral-NeMo-Minitron-Upscale-v1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16484040124647048}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.451} |
HF Open LLM v2 | mistral | nlpguy/Mistral-NeMo-Minitron-Upscale-v3 | 7d2d135a-ab81-49fa-8c17-07f9bd54399d | 0.0.1 | hfopenllm_v2/nlpguy_Mistral-NeMo-Minitron-Upscale-v3/1762652580.408863 | 1762652580.408864 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Mistral-NeMo-Minitron-Upscale-v3 | nlpguy/Mistral-NeMo-Minitron-Upscale-v3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14120976786038822}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.451} |
HF Open LLM v2 | mistral | nlpguy/Mistral-NeMo-Minitron-Upscale-v2 | 9cee29c1-b8dc-4a2c-b117-d5912b890824 | 0.0.1 | hfopenllm_v2/nlpguy_Mistral-NeMo-Minitron-Upscale-v2/1762652580.4086552 | 1762652580.408656 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Mistral-NeMo-Minitron-Upscale-v2 | nlpguy/Mistral-NeMo-Minitron-Upscale-v2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15727159492369136}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.451} |
HF Open LLM v2 | mistral | mistralai/Codestral-22B-v0.1 | b6fa1ae6-3df8-437d-a844-3fa022c12370 | 0.0.1 | hfopenllm_v2/mistralai_Codestral-22B-v0.1/1762652580.361543 | 1762652580.361544 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Codestral-22B-v0.1 | mistralai/Codestral-22B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5771752283939946}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 22.247} |
HF Open LLM v2 | mistral | mistralai/Mistral-Nemo-Base-2407 | 51b35f7f-f6f7-44ca-9816-b3d812112131 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Nemo-Base-2407/1762652580.363275 | 1762652580.363276 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Nemo-Base-2407 | mistralai/Mistral-Nemo-Base-2407 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16299197241098062}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 11.58} |
HF Open LLM v2 | mistral | mistralai/Mixtral-8x7B-v0.1 | f1822f64-0594-4f16-98f4-29932c604187 | 0.0.1 | hfopenllm_v2/mistralai_Mixtral-8x7B-v0.1/1762652580.364961 | 1762652580.364962 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mixtral-8x7B-v0.1 | mistralai/Mixtral-8x7B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24152692633324024}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | mistral | mistralai/Mixtral-8x7B-v0.1 | 4384c278-c869-4591-84fd-a8b2843fe42d | 0.0.1 | hfopenllm_v2/mistralai_Mixtral-8x7B-v0.1/1762652580.3651662 | 1762652580.3651662 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mixtral-8x7B-v0.1 | mistralai/Mixtral-8x7B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23260947618984296}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | mistral | mistralai/Mistral-7B-v0.1 | 44381c62-a310-4f01-bd66-9d1434638cf4 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-7B-v0.1/1762652580.362653 | 1762652580.362654 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-7B-v0.1 | mistralai/Mistral-7B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2385548123423627}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | mistralai/Mixtral-8x22B-v0.1 | b08cfbfa-906a-4dd0-b258-a7a56a6dcda4 | 0.0.1 | hfopenllm_v2/mistralai_Mixtral-8x22B-v0.1/1762652580.364491 | 1762652580.364492 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mixtral-8x22B-v0.1 | mistralai/Mixtral-8x22B-v0.1 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25826362939223485}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 140.621} |
HF Open LLM v2 | mistral | mistralai/Mistral-7B-v0.3 | 1a3acc9e-b2cd-4f80-8fcc-b227eee29f26 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-7B-v0.3/1762652580.362854 | 1762652580.362854 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-7B-v0.3 | mistralai/Mistral-7B-v0.3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22663976028050017}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.248} |
HF Open LLM v2 | mistral | mistralai/Mistral-Small-24B-Base-2501 | 6b30f50f-9a89-4a11-bcf9-4f38c46c1f18 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Small-24B-Base-2501/1762652580.363713 | 1762652580.363714 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Small-24B-Base-2501 | mistralai/Mistral-Small-24B-Base-2501 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16723848278124265}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | mistral | shivam9980/mistral-7b-news-cnn-merged | ce626634-c5a4-422d-8b03-1a28108809ce | 0.0.1 | hfopenllm_v2/shivam9980_mistral-7b-news-cnn-merged/1762652580.515563 | 1762652580.515563 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | shivam9980/mistral-7b-news-cnn-merged | shivam9980/mistral-7b-news-cnn-merged | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4634192830578421}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "?", "params_billions": 7.723} |
HF Open LLM v2 | mistral | M4-ai/TinyMistral-248M-v3 | 830423e1-ec14-4477-8c82-8516bb8e954f | 0.0.1 | hfopenllm_v2/M4-ai_TinyMistral-248M-v3/1762652579.742201 | 1762652579.742202 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | M4-ai/TinyMistral-248M-v3 | M4-ai/TinyMistral-248M-v3 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16386631914431488}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 0.248} |
HF Open LLM v2 | mistral | Pretergeek/openchat-3.5-0106_Rebased_Mistral-7B-v0.2 | 56d07a1f-1f1f-4559-b57d-bee3bf884860 | 0.0.1 | hfopenllm_v2/Pretergeek_openchat-3.5-0106_Rebased_Mistral-7B-v0.2/1762652579.817152 | 1762652579.817153 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/openchat-3.5-0106_Rebased_Mistral-7B-v0.2 | Pretergeek/openchat-3.5-0106_Rebased_Mistral-7B-v0.2 | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37062106322335847}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | kaist-ai/mistral-orpo-capybara-7k | 811cf797-62a1-4fda-960c-ee51f3e24a03 | 0.0.1 | hfopenllm_v2/kaist-ai_mistral-orpo-capybara-7k/1762652580.30416 | 1762652580.304161 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kaist-ai/mistral-orpo-capybara-7k | kaist-ai/mistral-orpo-capybara-7k | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.536733644507684}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistral | nvidia/Mistral-NeMo-Minitron-8B-Base | 7bbc4787-9899-4d90-90c6-dec88bc7dd52 | 0.0.1 | hfopenllm_v2/nvidia_Mistral-NeMo-Minitron-8B-Base/1762652580.415714 | 1762652580.415715 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/Mistral-NeMo-Minitron-8B-Base | nvidia/Mistral-NeMo-Minitron-8B-Base | mistral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19456597383830457}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.88} |
HF Open LLM v2 | huu-ontocord | huu-ontocord/wide_3b_orpo_stage1.1-ss1-orpo3 | 50854a36-b87e-421d-b8d5-7a46054ecc59 | 0.0.1 | hfopenllm_v2/huu-ontocord_wide_3b_orpo_stage1.1-ss1-orpo3/1762652580.202209 | 1762652580.20221 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | huu-ontocord/wide_3b_orpo_stage1.1-ss1-orpo3 | huu-ontocord/wide_3b_orpo_stage1.1-ss1-orpo3 | huu-ontocord | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15052726764983576}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.759} |
HF Open LLM v2 | anthropic | xxx777xxxASD/L3.1-ClaudeMaid-4x8B | ae6d070b-71de-40c3-8f69-944ce2e33abb | 0.0.1 | hfopenllm_v2/xxx777xxxASD_L3.1-ClaudeMaid-4x8B/1762652580.602767 | 1762652580.602768 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | xxx777xxxASD/L3.1-ClaudeMaid-4x8B | xxx777xxxASD/L3.1-ClaudeMaid-4x8B | anthropic | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6696487541944263}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 24.942} |
HF Open LLM v2 | P0x0 | P0x0/Astra-v1-12B | 349ae5f5-55d0-4486-a6dc-2b5644fac045 | 0.0.1 | hfopenllm_v2/P0x0_Astra-v1-12B/1762652579.8091059 | 1762652579.8091059 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | P0x0/Astra-v1-12B | P0x0/Astra-v1-12B | P0x0 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28059437847134494}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | paloalma | paloalma/ECE-TW3-JRGL-V5 | 9468fda5-a233-4d19-9a99-602e694f4a02 | 0.0.1 | hfopenllm_v2/paloalma_ECE-TW3-JRGL-V5/1762652580.433843 | 1762652580.4338439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | paloalma/ECE-TW3-JRGL-V5 | paloalma/ECE-TW3-JRGL-V5 | paloalma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4552509563513699}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 72.289} |
HF Open LLM v2 | paloalma | paloalma/ECE-TW3-JRGL-V1 | d86238d3-3a4e-467a-8ce1-e6a4a903aa3b | 0.0.1 | hfopenllm_v2/paloalma_ECE-TW3-JRGL-V1/1762652580.433397 | 1762652580.433398 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | paloalma/ECE-TW3-JRGL-V1 | paloalma/ECE-TW3-JRGL-V1 | paloalma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5534947273235016}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 68.977} |
HF Open LLM v2 | paloalma | paloalma/ECE-TW3-JRGL-V2 | d8d1a5b1-cc9a-4af9-b95f-db78f7edf70e | 0.0.1 | hfopenllm_v2/paloalma_ECE-TW3-JRGL-V2/1762652580.433646 | 1762652580.4336472 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | paloalma/ECE-TW3-JRGL-V2 | paloalma/ECE-TW3-JRGL-V2 | paloalma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2254894790267601}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.288} |
HF Open LLM v2 | paloalma | paloalma/TW3-JRGL-v2 | 525f2e27-bd77-49e9-85db-61efddbdd186 | 0.0.1 | hfopenllm_v2/paloalma_TW3-JRGL-v2/1762652580.43424 | 1762652580.434241 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | paloalma/TW3-JRGL-v2 | paloalma/TW3-JRGL-v2 | paloalma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5316127874040878}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 72.289} |
HF Open LLM v2 | paloalma | paloalma/Le_Triomphant-ECE-TW3 | 49f92222-f6cd-47e5-968d-10dc4345dd90 | 0.0.1 | hfopenllm_v2/paloalma_Le_Triomphant-ECE-TW3/1762652580.434039 | 1762652580.434039 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | paloalma/Le_Triomphant-ECE-TW3 | paloalma/Le_Triomphant-ECE-TW3 | paloalma | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5402055435134332}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 72.289} |
HF Open LLM v2 | Yash21 | Yash21/TinyYi-7B-Test | d6a9abee-29ee-44e0-802c-c3e4354ebbac | 0.0.1 | hfopenllm_v2/Yash21_TinyYi-7B-Test/1762652579.960211 | 1762652579.960212 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Yash21/TinyYi-7B-Test | Yash21/TinyYi-7B-Test | Yash21 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18564852369490728}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.061} |
HF Open LLM v2 | mosaicml | mosaicml/mpt-7b | 5e55c7ee-90f6-40a4-83ca-4a3acdad40f2 | 0.0.1 | hfopenllm_v2/mosaicml_mpt-7b/1762652580.3728561 | 1762652580.372857 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mosaicml/mpt-7b | mosaicml/mpt-7b | mosaicml | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21519900530592162}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MPTForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | RWKV | RWKV/rwkv-raven-14b | 9a90826f-9062-48aa-b047-d24f4e0d85ef | 0.0.1 | hfopenllm_v2/RWKV_rwkv-raven-14b/1762652579.849975 | 1762652579.849976 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | RWKV/rwkv-raven-14b | RWKV/rwkv-raven-14b | RWKV | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.07683723631076655}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "RwkvForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | DZgas | DZgas/GIGABATEMAN-7B | 180be3a9-1d8e-4705-bda4-032bc66768c6 | 0.0.1 | hfopenllm_v2/DZgas_GIGABATEMAN-7B/1762652579.524226 | 1762652579.5242271 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DZgas/GIGABATEMAN-7B | DZgas/GIGABATEMAN-7B | DZgas | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46074637517342876}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | ContactDoctor | ContactDoctor/Bio-Medical-3B-CoT-012025 | 4ad50c15-9b6d-40c8-b8ce-74253ecfe258 | 0.0.1 | hfopenllm_v2/ContactDoctor_Bio-Medical-3B-CoT-012025/1762652579.509939 | 1762652579.509939 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ContactDoctor/Bio-Medical-3B-CoT-012025 | ContactDoctor/Bio-Medical-3B-CoT-012025 | ContactDoctor | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.360379349016166}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.085} |
HF Open LLM v2 | dzakwan | dzakwan/dzakwan-MoE-4x7b-Beta | f4ceacae-0b81-44ac-8b9d-31d81e145bab | 0.0.1 | hfopenllm_v2/dzakwan_dzakwan-MoE-4x7b-Beta/1762652580.138297 | 1762652580.138298 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dzakwan/dzakwan-MoE-4x7b-Beta | dzakwan/dzakwan-MoE-4x7b-Beta | dzakwan | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44426011870725235}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 24.154} |
HF Open LLM v2 | langgptai | langgptai/qwen1.5-7b-chat-sa-v0.1 | 36137543-78a7-42a6-ad41-a4121797eec4 | 0.0.1 | hfopenllm_v2/langgptai_qwen1.5-7b-chat-sa-v0.1/1762652580.314067 | 1762652580.314068 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | langgptai/qwen1.5-7b-chat-sa-v0.1 | langgptai/qwen1.5-7b-chat-sa-v0.1 | langgptai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42677429221133256}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 15.443} |
HF Open LLM v2 | Azure99 | Azure99/blossom-v5.1-9b | 8eb55323-b0d7-4419-aec6-03de8bcd472e | 0.0.1 | hfopenllm_v2/Azure99_blossom-v5.1-9b/1762652579.487347 | 1762652579.487348 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Azure99/blossom-v5.1-9b | Azure99/blossom-v5.1-9b | Azure99 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5085816744016985}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.829} |
HF Open LLM v2 | Azure99 | Azure99/Blossom-V6-14B | 24ce59a5-c351-4ed8-8944-8ec5db739da8 | 0.0.1 | hfopenllm_v2/Azure99_Blossom-V6-14B/1762652579.486225 | 1762652579.4862258 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Azure99/Blossom-V6-14B | Azure99/Blossom-V6-14B | Azure99 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6395486198841297}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | Azure99 | Azure99/Blossom-V6-7B | 35949fb3-8c01-45cf-b4db-bbe983b15ac6 | 0.0.1 | hfopenllm_v2/Azure99_Blossom-V6-7B/1762652579.486468 | 1762652579.486469 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Azure99/Blossom-V6-7B | Azure99/Blossom-V6-7B | Azure99 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5538194213575536}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Azure99 | Azure99/blossom-v5.1-34b | d2342413-1b55-4da5-a6e5-da6274f309ad | 0.0.1 | hfopenllm_v2/Azure99_blossom-v5.1-34b/1762652579.4871309 | 1762652579.4871309 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Azure99/blossom-v5.1-34b | Azure99/blossom-v5.1-34b | Azure99 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5696562897556262}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | Azure99 | Azure99/blossom-v5-32b | 6adfe39d-f2c2-4101-8f0f-7496d55397cd | 0.0.1 | hfopenllm_v2/Azure99_blossom-v5-32b/1762652579.4866729 | 1762652579.4866738 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Azure99/blossom-v5-32b | Azure99/blossom-v5-32b | Azure99 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5235441960664371}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 32.512} |
HF Open LLM v2 | streamerbtw1002 | streamerbtw1002/Nexuim-R1-7B-Instruct | 3e78ef29-f546-41b0-af2b-f3ae4154e396 | 0.0.1 | hfopenllm_v2/streamerbtw1002_Nexuim-R1-7B-Instruct/1762652580.541884 | 1762652580.541885 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | streamerbtw1002/Nexuim-R1-7B-Instruct | streamerbtw1002/Nexuim-R1-7B-Instruct | streamerbtw1002 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6934289906337407}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | DRXD1000 | DRXD1000/Phoenix-7B | bff80553-e91f-470e-923c-7f8103d37fca | 0.0.1 | hfopenllm_v2/DRXD1000_Phoenix-7B/1762652579.5236301 | 1762652579.523632 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DRXD1000/Phoenix-7B | DRXD1000/Phoenix-7B | DRXD1000 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3209617149164218}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | DRXD1000 | DRXD1000/Atlas-7B | 1f223500-a1d6-471f-b3cf-2575ab5a52c8 | 0.0.1 | hfopenllm_v2/DRXD1000_Atlas-7B/1762652579.5232708 | 1762652579.523272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DRXD1000/Atlas-7B | DRXD1000/Atlas-7B | DRXD1000 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3704459722425387}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.768} |
HF Open LLM v2 | NLPark | NLPark/B-and-W_Flycatcher-3AD1E | 95b94fcb-7aba-4473-b88f-36dddcd646c1 | 0.0.1 | hfopenllm_v2/NLPark_B-and-W_Flycatcher-3AD1E/1762652579.7682638 | 1762652579.768265 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NLPark/B-and-W_Flycatcher-3AD1E | NLPark/B-and-W_Flycatcher-3AD1E | NLPark | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49084650948372543}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | NLPark | NLPark/Shi-Ci-Robin-Test_3AD80 | 0fa6785d-8db5-40f9-b259-3368ffb547d4 | 0.0.1 | hfopenllm_v2/NLPark_Shi-Ci-Robin-Test_3AD80/1762652579.768489 | 1762652579.76849 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NLPark/Shi-Ci-Robin-Test_3AD80 | NLPark/Shi-Ci-Robin-Test_3AD80 | NLPark | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7226547782107031}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | NLPark | NLPark/AnFeng_v3.1-Avocet | 17b3cc41-69ac-48a2-9371-a5d1368dfeb9 | 0.0.1 | hfopenllm_v2/NLPark_AnFeng_v3.1-Avocet/1762652579.76799 | 1762652579.767991 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NLPark/AnFeng_v3.1-Avocet | NLPark/AnFeng_v3.1-Avocet | NLPark | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5096311121158525}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 34.393} |
HF Open LLM v2 | arcee-ai | arcee-ai/Virtuoso-Small-v2 | 325cf0a5-6a72-466a-8e1e-531f03db6083 | 0.0.1 | hfopenllm_v2/arcee-ai_Virtuoso-Small-v2/1762652580.0172758 | 1762652580.017277 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Virtuoso-Small-v2 | arcee-ai/Virtuoso-Small-v2 | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8273181824226385}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | arcee-ai | arcee-ai/raspberry-3B | cef8c893-a903-4e30-b7e1-5f2fe8f2ac82 | 0.0.1 | hfopenllm_v2/arcee-ai_raspberry-3B/1762652580.017479 | 1762652580.017479 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/raspberry-3B | arcee-ai/raspberry-3B | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.31541642840995227}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | arcee-ai | arcee-ai/Arcee-Blitz | 01e8e033-1aa9-42e2-85d8-b7974d0c9e23 | 0.0.1 | hfopenllm_v2/arcee-ai_Arcee-Blitz/1762652580.0149639 | 1762652580.014965 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Arcee-Blitz | arcee-ai/Arcee-Blitz | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5543435861292482}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | arcee-ai | arcee-ai/SuperNova-Medius | 7e0e8ab9-a90b-4f0e-8e0a-eeceac12a4a1 | 0.0.1 | hfopenllm_v2/arcee-ai_SuperNova-Medius/1762652580.016611 | 1762652580.016612 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/SuperNova-Medius | arcee-ai/SuperNova-Medius | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7183584001560305}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | arcee-ai | arcee-ai/Virtuoso-Small | cc51c0e0-4e5d-496c-bf02-8b5d8f474cd3 | 0.0.1 | hfopenllm_v2/arcee-ai_Virtuoso-Small/1762652580.017056 | 1762652580.017057 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Virtuoso-Small | arcee-ai/Virtuoso-Small | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7935211904413622}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | arcee-ai | arcee-ai/Arcee-Spark | 1dde2278-39aa-43cf-8d94-5d4a0bb514ca | 0.0.1 | hfopenllm_v2/arcee-ai_Arcee-Spark/1762652580.0159192 | 1762652580.0159202 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Arcee-Spark | arcee-ai/Arcee-Spark | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.571829412625168}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | arcee-ai | arcee-ai/Arcee-Spark | 84a51879-cd67-449b-ace0-f87cccd6ea8c | 0.0.1 | hfopenllm_v2/arcee-ai_Arcee-Spark/1762652580.015698 | 1762652580.015699 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Arcee-Spark | arcee-ai/Arcee-Spark | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5620874834328471}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | arcee-ai | arcee-ai/Virtuoso-Lite | 62afba84-9929-4882-843e-3f7db7b030a3 | 0.0.1 | hfopenllm_v2/arcee-ai_Virtuoso-Lite/1762652580.0168262 | 1762652580.0168269 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Virtuoso-Lite | arcee-ai/Virtuoso-Lite | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8099575792231279}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | arcee-ai | arcee-ai/Arcee-Maestro-7B-Preview | 126f5eda-1529-450f-8557-dcd6a33b7bd4 | 0.0.1 | hfopenllm_v2/arcee-ai_Arcee-Maestro-7B-Preview/1762652580.015253 | 1762652580.015254 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Arcee-Maestro-7B-Preview | arcee-ai/Arcee-Maestro-7B-Preview | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2750247122080524}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | arcee-ai | arcee-ai/Arcee-Nova | 9063608f-8d32-4e98-ad05-621f6239d0ba | 0.0.1 | hfopenllm_v2/arcee-ai_Arcee-Nova/1762652580.0154781 | 1762652580.015479 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | arcee-ai/Arcee-Nova | arcee-ai/Arcee-Nova | arcee-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7907485471881275}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | NCSOFT | NCSOFT/Llama-VARCO-8B-Instruct | 38876858-0585-4edb-a4af-e4c71530429c | 0.0.1 | hfopenllm_v2/NCSOFT_Llama-VARCO-8B-Instruct/1762652579.767406 | 1762652579.7674072 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NCSOFT/Llama-VARCO-8B-Instruct | NCSOFT/Llama-VARCO-8B-Instruct | NCSOFT | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4470327619604871}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | noname0202 | noname0202/gemma-2-9b-sft-jp-en-zh-v1 | b32d34eb-14b5-410a-8772-041d40ca73b8 | 0.0.1 | hfopenllm_v2/noname0202_gemma-2-9b-sft-jp-en-zh-v1/1762652580.410035 | 1762652580.410036 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | noname0202/gemma-2-9b-sft-jp-en-zh-v1 | noname0202/gemma-2-9b-sft-jp-en-zh-v1 | noname0202 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.29880494864736673}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | noname0202 | noname0202/Llama-3.2-4x3B-Instruct | e9511b0a-1083-4a0d-a9e0-97efcfc0891e | 0.0.1 | hfopenllm_v2/noname0202_Llama-3.2-4x3B-Instruct/1762652580.409481 | 1762652580.409481 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | noname0202/Llama-3.2-4x3B-Instruct | noname0202/Llama-3.2-4x3B-Instruct | noname0202 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7067181744438091}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 9.949} |
HF Open LLM v2 | noname0202 | noname0202/gemma-2-9b-sft-jp-en-zh-v2 | ee687c56-a9b4-4205-866b-b3067c066992 | 0.0.1 | hfopenllm_v2/noname0202_gemma-2-9b-sft-jp-en-zh-v2/1762652580.4102452 | 1762652580.4102452 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | noname0202/gemma-2-9b-sft-jp-en-zh-v2 | noname0202/gemma-2-9b-sft-jp-en-zh-v2 | noname0202 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3993470657854493}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | GalrionSoftworks | GalrionSoftworks/MN-LooseCannon-12B-v1 | eb76e049-3a5d-4786-9724-800b719a6113 | 0.0.1 | hfopenllm_v2/GalrionSoftworks_MN-LooseCannon-12B-v1/1762652579.626794 | 1762652579.626794 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GalrionSoftworks/MN-LooseCannon-12B-v1 | GalrionSoftworks/MN-LooseCannon-12B-v1 | GalrionSoftworks | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5417791459992819}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | GalrionSoftworks | GalrionSoftworks/MagnusIntellectus-12B-v1 | 99a948ab-cc5b-4f3a-aae0-684cbfb6ffb3 | 0.0.1 | hfopenllm_v2/GalrionSoftworks_MagnusIntellectus-12B-v1/1762652579.62705 | 1762652579.627051 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GalrionSoftworks/MagnusIntellectus-12B-v1 | GalrionSoftworks/MagnusIntellectus-12B-v1 | GalrionSoftworks | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4421368635221213}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | CYFRAGOVPL | CYFRAGOVPL/Llama-PLLuM-8B-chat | cb833a8b-81d7-41a6-bff2-9d0927703113 | 0.0.1 | hfopenllm_v2/CYFRAGOVPL_Llama-PLLuM-8B-chat/1762652579.5008068 | 1762652579.500808 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | CYFRAGOVPL/Llama-PLLuM-8B-chat | CYFRAGOVPL/Llama-PLLuM-8B-chat | CYFRAGOVPL | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3514862786295917}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | CYFRAGOVPL | CYFRAGOVPL/PLLuM-12B-base | 76833817-781e-4292-9fe8-5e8a1da7f962 | 0.0.1 | hfopenllm_v2/CYFRAGOVPL_PLLuM-12B-base/1762652579.501051 | 1762652579.501052 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | CYFRAGOVPL/PLLuM-12B-base | CYFRAGOVPL/PLLuM-12B-base | CYFRAGOVPL | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2820937335159599}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | CYFRAGOVPL | CYFRAGOVPL/PLLuM-12B-nc-base | e9b90a3b-09c6-4d3b-9aa3-6279ea3cccb5 | 0.0.1 | hfopenllm_v2/CYFRAGOVPL_PLLuM-12B-nc-base/1762652579.501493 | 1762652579.501494 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | CYFRAGOVPL/PLLuM-12B-nc-base | CYFRAGOVPL/PLLuM-12B-nc-base | CYFRAGOVPL | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24045310886226323}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | CYFRAGOVPL | CYFRAGOVPL/PLLuM-12B-chat | 6e325f0f-b5db-4773-8179-7e949bd3f5f2 | 0.0.1 | hfopenllm_v2/CYFRAGOVPL_PLLuM-12B-chat/1762652579.501271 | 1762652579.501272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | CYFRAGOVPL/PLLuM-12B-chat | CYFRAGOVPL/PLLuM-12B-chat | CYFRAGOVPL | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32143601200370575}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | CYFRAGOVPL | CYFRAGOVPL/PLLuM-12B-nc-chat | fd19dada-5945-45d5-8a84-122404b8dd57 | 0.0.1 | hfopenllm_v2/CYFRAGOVPL_PLLuM-12B-nc-chat/1762652579.501705 | 1762652579.501706 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | CYFRAGOVPL/PLLuM-12B-nc-chat | CYFRAGOVPL/PLLuM-12B-nc-chat | CYFRAGOVPL | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28344237733657807}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | voidful | voidful/smol-360m-ft | b93d3a57-2535-4150-a2db-71a50569e6ae | 0.0.1 | hfopenllm_v2/voidful_smol-360m-ft/1762652580.589319 | 1762652580.58932 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | voidful/smol-360m-ft | voidful/smol-360m-ft | voidful | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2013103011121602}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 0.362} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.24 | 59e5fcd0-e46f-4346-b695-bee4dab9cfc4 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.24/1762652580.3999438 | 1762652580.3999438 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.24 | netcat420/MFANNv0.24 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3162409074588758}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.20 | 4c84cbc4-1a4d-45d9-909b-92d2b4e961b6 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.20/1762652580.399081 | 1762652580.399082 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.20 | netcat420/MFANNv0.20 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34786477657061043}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.23 | 28396f73-b949-4db0-b685-77fc5901770b | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.23/1762652580.39747 | 1762652580.397471 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.23 | netcat420/MFANN3bv0.23 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20480768804549704}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.19 | d2b0785d-a169-4773-a3fc-95b536fe3cc2 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.19/1762652580.39887 | 1762652580.39887 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.19 | netcat420/MFANNv0.19 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30567449921763146}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.19 | 4207b373-ef5c-48f8-a463-814b81a44410 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.19/1762652580.396478 | 1762652580.396479 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.19 | netcat420/MFANN3bv0.19 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22581528123157665}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/DeepSeek-R1-MFANN-TIES-unretrained-7b | 43da500e-cdc7-4b70-a0eb-6ae3371670d9 | 0.0.1 | hfopenllm_v2/netcat420_DeepSeek-R1-MFANN-TIES-unretrained-7b/1762652580.3919501 | 1762652580.391951 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/DeepSeek-R1-MFANN-TIES-unretrained-7b | netcat420/DeepSeek-R1-MFANN-TIES-unretrained-7b | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2586880587951081}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.21 | 5d37ba65-09f6-4762-836e-4634c06ac9f7 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.21/1762652580.399296 | 1762652580.399297 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.21 | netcat420/MFANNv0.21 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3233099287667832}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.21 | 053f6333-9722-4c3e-a5bb-246b273225de | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.21/1762652580.397045 | 1762652580.397046 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.21 | netcat420/MFANN3bv0.21 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1909189838517356}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv1.3 | 5981cb70-62a7-4e42-bf12-081c67c1b792 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv1.3/1762652580.3983822 | 1762652580.3983831 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv1.3 | netcat420/MFANN3bv1.3 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25466650709007654}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv1.2 | 16b4d316-db1d-4282-a5c0-b8ffe4af817c | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv1.2/1762652580.3980958 | 1762652580.3980958 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv1.2 | netcat420/MFANN3bv1.2 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2686050789682487}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.775} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.22.1 | 5009ba04-1a8d-4e91-bd32-659fe67c4d26 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.22.1/1762652580.3995059 | 1762652580.399507 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.22.1 | netcat420/MFANNv0.22.1 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3089469274857378}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.24 | 0081cd67-9178-4443-aebf-721b75c0fc77 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.24/1762652580.397681 | 1762652580.397682 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.24 | netcat420/MFANN3bv0.24 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2200450360598767}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN-SFT | 748c7e5a-697b-4763-a43e-e3b6a6f2951b | 0.0.1 | hfopenllm_v2/netcat420_MFANN-SFT/1762652580.393719 | 1762652580.3937201 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN-SFT | netcat420/MFANN-SFT | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36822298168858625}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.20 | 2d36210e-e2ca-41a8-9434-c29168849a28 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.20/1762652580.3967948 | 1762652580.396796 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.20 | netcat420/MFANN3bv0.20 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21934578030736224}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.15 | ebdb6805-f14e-4fb9-b1c8-acd258b93385 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.15/1762652580.3958452 | 1762652580.395846 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.15 | netcat420/MFANN3bv0.15 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2012105657433388}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv1.1 | fb148468-c189-4fe5-b803-7532af8dec1d | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv1.1/1762652580.3978848 | 1762652580.397886 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv1.1 | netcat420/MFANN3bv1.1 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2506948230694557}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.775} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.25 | e94f28ff-ae6c-4109-96a2-9dbe07621e03 | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.25/1762652580.400151 | 1762652580.400151 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.25 | netcat420/MFANNv0.25 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34666573580322435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3b | c5913e2b-c8c7-4e8f-a1c3-f2f764c8478d | 0.0.1 | hfopenllm_v2/netcat420_MFANN3b/1762652580.395648 | 1762652580.395648 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3b | netcat420/MFANN3b | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2524435165361241}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.22 | e551e936-41fa-4fda-84e9-dec9f5694c5d | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.22/1762652580.39726 | 1762652580.3972611 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.22 | netcat420/MFANN3bv0.22 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1979381374752324}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv0.18 | 5b522625-39ed-4faa-a3f6-1cec01baf906 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv0.18/1762652580.396076 | 1762652580.396081 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv0.18 | netcat420/MFANN3bv0.18 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22064455644356973}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANN3bv1.4 | 426bdea2-83f2-4915-9e82-ba4c8c8f4224 | 0.0.1 | hfopenllm_v2/netcat420_MFANN3bv1.4/1762652580.398614 | 1762652580.3986151 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN3bv1.4 | netcat420/MFANN3bv1.4 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.35243598097492435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | netcat420 | netcat420/MFANNv0.23 | f7b617fa-7095-4eef-88bb-4fd73c23d5dc | 0.0.1 | hfopenllm_v2/netcat420_MFANNv0.23/1762652580.3997262 | 1762652580.399727 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANNv0.23 | netcat420/MFANNv0.23 | netcat420 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3127435205255389}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | ZhangShenao | ZhangShenao/SELM-Llama-3-8B-Instruct-iter-3 | 6bf4063b-44aa-4809-a400-5406abe5eb2e | 0.0.1 | hfopenllm_v2/ZhangShenao_SELM-Llama-3-8B-Instruct-iter-3/1762652579.9690418 | 1762652579.969043 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ZhangShenao/SELM-Llama-3-8B-Instruct-iter-3 | ZhangShenao/SELM-Llama-3-8B-Instruct-iter-3 | ZhangShenao | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6902817856620433}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | microsoft | microsoft/Orca-2-13b | 4f9c7197-1eb6-45eb-851e-46707017fe7f | 0.0.1 | hfopenllm_v2/microsoft_Orca-2-13b/1762652580.3541 | 1762652580.3541 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Orca-2-13b | microsoft/Orca-2-13b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3127933882099496}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 13.0} |
HF Open LLM v2 | microsoft | Orion-zhen/phi-4-abliterated | 3970f988-26f6-4810-839a-e5f4fcd6618a | 0.0.1 | hfopenllm_v2/Orion-zhen_phi-4-abliterated/1762652579.808864 | 1762652579.808865 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Orion-zhen/phi-4-abliterated | Orion-zhen/phi-4-abliterated | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05760271634817839}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
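Each row serializes its per-benchmark scores as a JSON list in the `evaluation_results` column (truncated above with `...`). A minimal sketch of extracting a named benchmark score from one record, assuming the full field continues in the structure visible in the truncated excerpts (`evaluation_name`, `metric_config`, `score_details.score`); the sample payload below reuses the IFEval entry from the arcee-ai/Arcee-Nova row:

```python
import json

# One record's evaluation_results, as serialized in the dataset.
# This mirrors the truncated excerpts shown in the rows; the full field
# is assumed to continue in the same shape (assumption), with further
# entries such as "BBH" following the "IFEval" entry.
raw = json.dumps([
    {
        "evaluation_name": "IFEval",
        "metric_config": {
            "evaluation_description": "Accuracy on IFEval",
            "lower_is_better": False,
            "score_type": "continuous",
            "min_score": 0,
            "max_score": 1,
        },
        "score_details": {"score": 0.7907485471881275},
    },
])

def get_score(evaluation_results_json: str, name: str) -> float | None:
    """Return the score for the named benchmark, or None if absent."""
    for entry in json.loads(evaluation_results_json):
        if entry["evaluation_name"] == name:
            return entry["score_details"]["score"]
    return None

print(get_score(raw, "IFEval"))  # 0.7907485471881275 (arcee-ai/Arcee-Nova)
```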