| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | microsoft | suayptalha/Luminis-phi-4 | ace18207-a255-447d-9aba-8afdee092164 | 0.0.1 | hfopenllm_v2/suayptalha_Luminis-phi-4/1762652580.544511 | 1762652580.544511 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | suayptalha/Luminis-phi-4 | suayptalha/Luminis-phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6900069593124022}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-medium-128k-instruct | 0c2670d3-1fb5-4825-860f-dc84dbd7bb99 | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-medium-128k-instruct/1762652580.354526 | 1762652580.354527 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-medium-128k-instruct | microsoft/Phi-3-medium-128k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6040029344361849}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | MaziyarPanahi/calme-2.1-phi3-4b | 79b4a850-85b6-45aa-8cc1-5210230a38aa | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.1-phi3-4b/1762652579.751861 | 1762652579.751862 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.1-phi3-4b | MaziyarPanahi/calme-2.1-phi3-4b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.552520645221346}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | MaziyarPanahi/calme-2.3-phi3-4b | 99b96f53-5ac6-4001-abc6-2a4e43f09028 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.3-phi3-4b/1762652579.755463 | 1762652579.755465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.3-phi3-4b | MaziyarPanahi/calme-2.3-phi3-4b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49264507063480456}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | MaziyarPanahi/calme-2.1-phi3.5-4b | 69433e39-158a-46df-a987-ac2a6b3af2af | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.1-phi3.5-4b/1762652579.752121 | 1762652579.7521222 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.1-phi3.5-4b | MaziyarPanahi/calme-2.1-phi3.5-4b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5659095644002359}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | MaziyarPanahi/calme-2.2-phi3-4b | 56593987-babd-4a30-9a20-f83e7d233809 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.2-phi3-4b/1762652579.7536151 | 1762652579.7536159 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.2-phi3-4b | MaziyarPanahi/calme-2.2-phi3-4b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5069083365470286}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | microsoft/Phi-4-mini-instruct | 1d02fe1c-f31d-4d38-a8c3-dc427e25cb80 | 0.0.1 | hfopenllm_v2/microsoft_Phi-4-mini-instruct/1762652580.356846 | 1762652580.356847 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-4-mini-instruct | microsoft/Phi-4-mini-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7377923908562614}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.836} |
HF Open LLM v2 | microsoft | NikolaSigmoid/phi-4-300steps | e54de9df-52e5-43d2-92c3-9d5207c0e335 | 0.0.1 | hfopenllm_v2/NikolaSigmoid_phi-4-300steps/1762652579.784649 | 1762652579.78465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NikolaSigmoid/phi-4-300steps | NikolaSigmoid/phi-4-300steps | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05607898154674043}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "", "params_billions": 14.704} |
HF Open LLM v2 | microsoft | NikolaSigmoid/phi-4-14b | cae2d4a1-4632-420f-be40-594f4c001d4d | 0.0.1 | hfopenllm_v2/NikolaSigmoid_phi-4-14b/1762652579.784184 | 1762652579.7841852 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NikolaSigmoid/phi-4-14b | NikolaSigmoid/phi-4-14b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05607898154674043}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "", "params_billions": 14.704} |
HF Open LLM v2 | microsoft | NikolaSigmoid/phi-4-1steps | a4763c48-f2ab-4f3e-bc1f-a7f4a9f33cf8 | 0.0.1 | hfopenllm_v2/NikolaSigmoid_phi-4-1steps/1762652579.784436 | 1762652579.784437 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NikolaSigmoid/phi-4-1steps | NikolaSigmoid/phi-4-1steps | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05275668559422333}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "", "params_billions": 14.704} |
HF Open LLM v2 | microsoft | FINGU-AI/Phi-4-RRStock | 9d85345f-d46b-4431-b5fb-5cca99d92f21 | 0.0.1 | hfopenllm_v2/FINGU-AI_Phi-4-RRStock/1762652579.616194 | 1762652579.616194 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | FINGU-AI/Phi-4-RRStock | FINGU-AI/Phi-4-RRStock | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28554125276488607}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.652} |
HF Open LLM v2 | microsoft | mkurman/phi-4-MedIT-11B-exp-1 | d64a8825-610a-4128-8c68-55150a76ed88 | 0.0.1 | hfopenllm_v2/mkurman_phi-4-MedIT-11B-exp-1/1762652580.3661451 | 1762652580.366146 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mkurman/phi-4-MedIT-11B-exp-1 | mkurman/phi-4-MedIT-11B-exp-1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5947607902587357}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 11.514} |
HF Open LLM v2 | microsoft | mkurman/phi4-MedIT-10B-o1 | c5a2a30d-99b0-4658-97f5-4c9be5576073 | 0.0.1 | hfopenllm_v2/mkurman_phi4-MedIT-10B-o1/1762652580.366463 | 1762652580.366464 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mkurman/phi4-MedIT-10B-o1 | mkurman/phi4-MedIT-10B-o1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34629117408476173}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaMedITForCausalLM", "params_billions": 10.255} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-mini-4k-instruct | 0c861cdd-1ddb-43a1-991b-300887e1da1b | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-mini-4k-instruct/1762652580.355623 | 1762652580.355624 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-mini-4k-instruct | microsoft/Phi-3-mini-4k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5612884923115112}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-mini-4k-instruct | 97e50198-ba06-4c17-81d3-59270b71a89d | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-mini-4k-instruct/1762652580.355825 | 1762652580.355826 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-mini-4k-instruct | microsoft/Phi-3-mini-4k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.547674614467391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | magnifi/Phi3_intent_v56_3_w_unknown_5_lr_0.002 | c78d1aaf-9975-45d6-9a8d-eed76f7e0a0f | 0.0.1 | hfopenllm_v2/magnifi_Phi3_intent_v56_3_w_unknown_5_lr_0.002/1762652580.32982 | 1762652580.329825 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | magnifi/Phi3_intent_v56_3_w_unknown_5_lr_0.002 | magnifi/Phi3_intent_v56_3_w_unknown_5_lr_0.002 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20181008612703183}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-medium-4k-instruct | 1b921ad2-9ed3-46d5-ab65-f125ce97b35f | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-medium-4k-instruct/1762652580.354986 | 1762652580.35499 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-medium-4k-instruct | microsoft/Phi-3-medium-4k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6422713954529538}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | rhysjones/phi-2-orange-v2 | bf679659-f55f-43c8-86b5-ed7805e8c3ee | 0.0.1 | hfopenllm_v2/rhysjones_phi-2-orange-v2/1762652580.495306 | 1762652580.495307 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | rhysjones/phi-2-orange-v2 | rhysjones/phi-2-orange-v2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3669740732367895}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | carsenk/phi3.5_mini_exp_825_uncensored | 68315e0a-603c-4784-a567-e342a6185c07 | 0.0.1 | hfopenllm_v2/carsenk_phi3.5_mini_exp_825_uncensored/1762652580.083884 | 1762652580.083887 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | carsenk/phi3.5_mini_exp_825_uncensored | carsenk/phi3.5_mini_exp_825_uncensored | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13641360479084386}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | fhai50032/Unaligned-Thinker-PHI-4 | bda90ce2-cb80-4942-8492-28329d7f5aeb | 0.0.1 | hfopenllm_v2/fhai50032_Unaligned-Thinker-PHI-4/1762652580.154337 | 1762652580.1543381 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | fhai50032/Unaligned-Thinker-PHI-4 | fhai50032/Unaligned-Thinker-PHI-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.056254072527560206}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-small-128k-instruct | f7c1a443-006b-4ade-9b0f-895392e52b7c | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-small-128k-instruct/1762652580.356006 | 1762652580.356006 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-small-128k-instruct | microsoft/Phi-3-small-128k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6368258443153056}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3SmallForCausalLM", "params_billions": 7.392} |
HF Open LLM v2 | microsoft | migtissera/Tess-v2.5-Phi-3-medium-128k-14B | 260f2500-c920-4e3f-901b-10efc03f0390 | 0.0.1 | hfopenllm_v2/migtissera_Tess-v2.5-Phi-3-medium-128k-14B/1762652580.35902 | 1762652580.359021 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | migtissera/Tess-v2.5-Phi-3-medium-128k-14B | migtissera/Tess-v2.5-Phi-3-medium-128k-14B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45387682460316403}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | EpistemeAI/Fireball-12B-v1.13a-philosophers | 38fae832-3d96-457d-851b-7fcded3f7796 | 0.0.1 | hfopenllm_v2/EpistemeAI_Fireball-12B-v1.13a-philosophers/1762652579.60018 | 1762652579.600181 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI/Fireball-12B-v1.13a-philosophers | EpistemeAI/Fireball-12B-v1.13a-philosophers | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.08755324760524298}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.0} |
HF Open LLM v2 | microsoft | EpistemeAI/DeepThinkers-Phi4 | 3c97155d-c086-42aa-af12-14316fcf723c | 0.0.1 | hfopenllm_v2/EpistemeAI_DeepThinkers-Phi4/1762652579.599432 | 1762652579.599433 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI/DeepThinkers-Phi4 | EpistemeAI/DeepThinkers-Phi4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6939786433330231}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | DreadPoor/Morphing-8B-Model_Stock | 0fd25475-5202-4cd1-b399-bfb8e113d85b | 0.0.1 | hfopenllm_v2/DreadPoor_Morphing-8B-Model_Stock/1762652579.577464 | 1762652579.577465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DreadPoor/Morphing-8B-Model_Stock | DreadPoor/Morphing-8B-Model_Stock | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.744536718130117}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | microsoft | Xiaojian9992024/Phi-4-mini-UNOFFICAL | 058de011-1e80-4a6d-803f-8ba7f927cd7f | 0.0.1 | hfopenllm_v2/Xiaojian9992024_Phi-4-mini-UNOFFICAL/1762652579.9531882 | 1762652579.9531891 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Xiaojian9992024/Phi-4-mini-UNOFFICAL | Xiaojian9992024/Phi-4-mini-UNOFFICAL | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12732106366662677}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.754} |
HF Open LLM v2 | microsoft | Xiaojian9992024/Phi-4-Megatron-Empathetic | aec0af15-927b-48bd-a889-d4715aff4c42 | 0.0.1 | hfopenllm_v2/Xiaojian9992024_Phi-4-Megatron-Empathetic/1762652579.952935 | 1762652579.952936 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Xiaojian9992024/Phi-4-Megatron-Empathetic | Xiaojian9992024/Phi-4-Megatron-Empathetic | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.01726086783068924}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Triangle104/Phi4-RP-o1 | 9ed49666-aee1-43d0-8c7c-98c178860f0c | 0.0.1 | hfopenllm_v2/Triangle104_Phi4-RP-o1/1762652579.9312892 | 1762652579.9312901 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Phi4-RP-o1 | Triangle104/Phi4-RP-o1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.022007163215822904}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Triangle104/Phi-4-AbliteratedRP | ef628438-c2ff-4939-8bf1-09f1df25fd15 | 0.0.1 | hfopenllm_v2/Triangle104_Phi-4-AbliteratedRP/1762652579.931047 | 1762652579.931048 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Phi-4-AbliteratedRP | Triangle104/Phi-4-AbliteratedRP | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49227050891634194}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Triangle104/Phi4-RP-o1-Ablit | c3578998-b9dc-4b42-a8cb-0bdf05cffc9f | 0.0.1 | hfopenllm_v2/Triangle104_Phi4-RP-o1-Ablit/1762652579.93156 | 1762652579.93156 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Phi4-RP-o1-Ablit | Triangle104/Phi4-RP-o1-Ablit | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.02385559205131274}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Danielbrdz/Barcenas-14b-phi-4 | 720029f0-41d5-4161-878e-4218f230455c | 0.0.1 | hfopenllm_v2/Danielbrdz_Barcenas-14b-phi-4/1762652579.533744 | 1762652579.533744 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Danielbrdz/Barcenas-14b-phi-4 | Danielbrdz/Barcenas-14b-phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0497590836757581}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Danielbrdz/Barcenas-14b-phi-4-v2 | 4180c069-33e8-4109-9d35-dde82549ba26 | 0.0.1 | hfopenllm_v2/Danielbrdz_Barcenas-14b-phi-4-v2/1762652579.533969 | 1762652579.533969 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Danielbrdz/Barcenas-14b-phi-4-v2 | Danielbrdz/Barcenas-14b-phi-4-v2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27747266142723526}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO | f9ce1ec0-e727-474b-acb7-1ba49311e355 | 0.0.1 | hfopenllm_v2/Danielbrdz_Barcenas-14b-Phi-3-medium-ORPO/1762652579.53347 | 1762652579.5334709 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO | Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4799055395240185}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | abideen/MedPhi-4-14B-v1 | 0367a9de-960b-4c1d-8e63-8dea06197bfa | 0.0.1 | hfopenllm_v2/abideen_MedPhi-4-14B-v1/1762652579.973941 | 1762652579.973942 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abideen/MedPhi-4-14B-v1 | abideen/MedPhi-4-14B-v1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6276834355066778}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | 958ad3b8-9b65-4165-9d3c-a49e25802fd3 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.2-Phi-3-Medium-abliterated/1762652580.114508 | 1762652580.114509 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4123614232458765}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | 6f89f55f-a259-419a-b6ad-9b01b2dae9d8 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.2-Phi-3-Medium-abliterated/1762652580.1142762 | 1762652580.1142762 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36125369574950017}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | cognitivecomputations/Dolphin3.0-R1-Mistral-24B | 8a641aee-1604-4910-8164-9e6d5c0652b1 | 0.0.1 | hfopenllm_v2/cognitivecomputations_Dolphin3.0-R1-Mistral-24B/1762652580.112771 | 1762652580.112771 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/Dolphin3.0-R1-Mistral-24B | cognitivecomputations/Dolphin3.0-R1-Mistral-24B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.406816136739407}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.3-Yi-1.5-34B-32k | 0e625490-b7b1-4b64-aa1e-222c4e21d7a5 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.3-Yi-1.5-34B-32k/1762652580.115152 | 1762652580.115152 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.3-Yi-1.5-34B-32k | cognitivecomputations/dolphin-2.9.3-Yi-1.5-34B-32k | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3639266036339136}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.0} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.1-yi-1.5-9b | e1003371-d503-469d-ae41-e813d097ea43 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.1-yi-1.5-9b/1762652580.113816 | 1762652580.113816 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.1-yi-1.5-9b | cognitivecomputations/dolphin-2.9.1-yi-1.5-9b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44653297694561545}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.829} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.1-yi-1.5-34b | 4e6cb7a6-f01d-4e25-be2f-bda77af2eaf6 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.1-yi-1.5-34b/1762652580.113518 | 1762652580.1135192 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.1-yi-1.5-34b | cognitivecomputations/dolphin-2.9.1-yi-1.5-34b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3852588908540451}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium | 36476eb7-a89a-45e1-b423-7755edfd5be1 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.2-Phi-3-Medium/1762652580.114048 | 1762652580.114049 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium | cognitivecomputations/dolphin-2.9.2-Phi-3-Medium | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4247762603226107}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": -1.0} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b | 05488c6f-dfd4-4481-a3d4-15a918b115d3 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.3-mistral-nemo-12b/1762652580.115594 | 1762652580.115595 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b | cognitivecomputations/dolphin-2.9.3-mistral-nemo-12b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5600894515441251}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | microsoft | cognitivecomputations/dolphin-2.9.3-mistral-7B-32k | 4a0bc836-88b7-4d6e-9f0d-321ff75b1733 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.3-mistral-7B-32k/1762652580.1153762 | 1762652580.115377 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.3-mistral-7B-32k | cognitivecomputations/dolphin-2.9.3-mistral-7B-32k | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4126362495955177}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.248} |
HF Open LLM v2 | microsoft | tensopolis/phi-4-tensopolis-v1 | bcbdde44-0736-4162-9faf-cd9d8e89d360 | 0.0.1 | hfopenllm_v2/tensopolis_phi-4-tensopolis-v1/1762652580.5562031 | 1762652580.5562031 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/phi-4-tensopolis-v1 | tensopolis/phi-4-tensopolis-v1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6766679078179231}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/Orca-2-7b | c13a5d55-44f7-43fc-a633-9af7677a26fb | 0.0.1 | hfopenllm_v2/microsoft_Orca-2-7b/1762652580.354311 | 1762652580.354312 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Orca-2-7b | microsoft/Orca-2-7b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2183462102776189}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | microsoft | Youlln/3PRYMMAL-PHI3-3B-SLERP | 2c53181b-8681-46ad-b739-396b1ecb163c | 0.0.1 | hfopenllm_v2/Youlln_3PRYMMAL-PHI3-3B-SLERP/1762652579.9609358 | 1762652579.960937 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/3PRYMMAL-PHI3-3B-SLERP | Youlln/3PRYMMAL-PHI3-3B-SLERP | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3655500738041729}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-Super-o1 | b90749f4-0542-42b6-a708-4e14bc586ad1 | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-Super-o1/1762652580.470741 | 1762652580.470741 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-Super-o1 | prithivMLmods/Phi-4-Super-o1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04176584795010572}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-Math-IO | 88c03059-5add-46ea-b423-4cf8496c5763 | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-Math-IO/1762652580.469801 | 1762652580.469801 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-Math-IO | prithivMLmods/Phi-4-Math-IO | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05897684809638426}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi4-Super | 07ee76dd-a928-469b-912e-cfd2e0a26ef9 | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi4-Super/1762652580.471183 | 1762652580.4711838 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi4-Super | prithivMLmods/Phi4-Super | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04813561350549875}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-Empathetic | a7a2af83-7047-4601-bfdd-ac25abf3890d | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-Empathetic/1762652580.469516 | 1762652580.469517 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-Empathetic | prithivMLmods/Phi-4-Empathetic | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.049659348306936704}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-QwQ | 8e84f2de-117a-4526-9d58-86a63011a07f | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-QwQ/1762652580.470021 | 1762652580.470022 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-QwQ | prithivMLmods/Phi-4-QwQ | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05592937849350833}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-Super-1 | 91c5f088-38fd-4ea7-bf95-3d6a69653cca | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-Super-1/1762652580.470496 | 1762652580.470498 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-Super-1 | prithivMLmods/Phi-4-Super-1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04176584795010572}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-o1 | d58bf1bb-e269-4741-a9f1-be242443ad4a | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-o1/1762652580.470958 | 1762652580.4709592 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-o1 | prithivMLmods/Phi-4-o1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.028976449154908976}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | prithivMLmods/Phi-4-Super | ec19309c-9bbe-4d42-894d-3638dbe5dfac | 0.0.1 | hfopenllm_v2/prithivMLmods_Phi-4-Super/1762652580.470242 | 1762652580.470242 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Phi-4-Super | prithivMLmods/Phi-4-Super | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04813561350549875}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | HeraiHench/Phi-4-slerp-ReasoningRP-14B | ca0a3f22-099f-4207-acfe-4b70aa00171e | 0.0.1 | hfopenllm_v2/HeraiHench_Phi-4-slerp-ReasoningRP-14B/1762652579.639999 | 1762652579.64 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HeraiHench/Phi-4-slerp-ReasoningRP-14B | HeraiHench/Phi-4-slerp-ReasoningRP-14B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15754642127333254}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 9.207} |
HF Open LLM v2 | microsoft | VAGOsolutions/SauerkrautLM-Phi-3-medium | ae8b39a7-7fca-441f-bae3-8db76879cefe | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-Phi-3-medium/1762652579.942282 | 1762652579.942282 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-Phi-3-medium | VAGOsolutions/SauerkrautLM-Phi-3-medium | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4408879550703245}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | Undi95/Phi4-abliterated | 29c3f781-f49c-4afc-bbc4-a47aebc91f71 | 0.0.1 | hfopenllm_v2/Undi95_Phi4-abliterated/1762652579.9391701 | 1762652579.939171 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Undi95/Phi4-abliterated | Undi95/Phi4-abliterated | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6617552538375954}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | Novaciano/Fusetrix-Dolphin-3.2-1B-GRPO_Creative_RP | 582f87ef-50c5-4a5b-9d76-bc71f97bd2fb | 0.0.1 | hfopenllm_v2/Novaciano_Fusetrix-Dolphin-3.2-1B-GRPO_Creative_RP/1762652579.7955709 | 1762652579.795572 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Novaciano/Fusetrix-Dolphin-3.2-1B-GRPO_Creative_RP | Novaciano/Fusetrix-Dolphin-3.2-1B-GRPO_Creative_RP | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5342856952885011}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | microsoft | NyxKrage/Microsoft_Phi-4 | 46494bad-fb41-4fa3-b568-be4e6a22ae5b | 0.0.1 | hfopenllm_v2/NyxKrage_Microsoft_Phi-4/1762652579.7969122 | 1762652579.796913 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NyxKrage/Microsoft_Phi-4 | NyxKrage/Microsoft_Phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0585269307659233}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | hotmailuser/Phi4-Slerp4-14B | da866c81-296f-463c-962b-6b871d6fb633 | 0.0.1 | hfopenllm_v2/hotmailuser_Phi4-Slerp4-14B/1762652580.1958668 | 1762652580.195868 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | hotmailuser/Phi4-Slerp4-14B | hotmailuser/Phi4-Slerp4-14B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0629485321170051}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | allknowingroger/MistralPhi3-11B | f7f557cf-4c63-444a-8c8f-515796b9b127 | 0.0.1 | hfopenllm_v2/allknowingroger_MistralPhi3-11B/1762652579.990464 | 1762652579.990464 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allknowingroger/MistralPhi3-11B | allknowingroger/MistralPhi3-11B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1942911474886634}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 11.234} |
HF Open LLM v2 | microsoft | allknowingroger/ROGERphi-7B-slerp | 9e7ef237-2e59-429d-9784-45de952f60af | 0.0.1 | hfopenllm_v2/allknowingroger_ROGERphi-7B-slerp/1762652580.0022678 | 1762652580.002269 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allknowingroger/ROGERphi-7B-slerp | allknowingroger/ROGERphi-7B-slerp | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3861332375873793}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | microsoft | allknowingroger/Phi3mash1-17B-pass | 83ec9172-5769-4737-a766-0ca2006dd3e4 | 0.0.1 | hfopenllm_v2/allknowingroger_Phi3mash1-17B-pass/1762652579.997936 | 1762652579.997937 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | allknowingroger/Phi3mash1-17B-pass | allknowingroger/Phi3mash1-17B-pass | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18842116694814204}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 16.687} |
HF Open LLM v2 | microsoft | uukuguy/speechless-mistral-dolphin-orca-platypus-samantha-7b | 49cd8aff-0c7a-4245-831a-f4fc64383b48 | 0.0.1 | hfopenllm_v2/uukuguy_speechless-mistral-dolphin-orca-platypus-samantha-7b/1762652580.583631 | 1762652580.5836318 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | uukuguy/speechless-mistral-dolphin-orca-platypus-samantha-7b | uukuguy/speechless-mistral-dolphin-orca-platypus-samantha-7b | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37002154283966543}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | microsoft | Josephgflowers/Cinder-Phi-2-V1-F16-gguf | 4d0a565c-14b2-4ce9-97c0-4d114548fe48 | 0.0.1 | hfopenllm_v2/Josephgflowers_Cinder-Phi-2-V1-F16-gguf/1762652579.694953 | 1762652579.694954 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Josephgflowers/Cinder-Phi-2-V1-F16-gguf | Josephgflowers/Cinder-Phi-2-V1-F16-gguf | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23565694579271884}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | Sakalti/Phi3.5-Comets-3.8B | 7d9a3955-232c-4a93-b879-bd065bab4768 | 0.0.1 | hfopenllm_v2/Sakalti_Phi3.5-Comets-3.8B/1762652579.858093 | 1762652579.858093 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Phi3.5-Comets-3.8B | Sakalti/Phi3.5-Comets-3.8B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20942876013422163}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | SicariusSicariiStuff/Phi-lthy4 | 56fa06dd-fd07-4613-9ac5-81c739cb6a64 | 0.0.1 | hfopenllm_v2/SicariusSicariiStuff_Phi-lthy4/1762652579.883529 | 1762652579.88353 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SicariusSicariiStuff/Phi-lthy4 | SicariusSicariiStuff/Phi-lthy4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7679423928509688}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 11.933} |
HF Open LLM v2 | microsoft | SicariusSicariiStuff/Phi-Line_14B | 12b2a13d-2b38-47e6-a6d2-3d5a30bff5ae | 0.0.1 | hfopenllm_v2/SicariusSicariiStuff_Phi-Line_14B/1762652579.8832798 | 1762652579.8832798 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SicariusSicariiStuff/Phi-Line_14B | SicariusSicariiStuff/Phi-Line_14B | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6495653754260917}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-RR-Shoup | 377bc688-a18e-4abb-91f7-d78a934e1649 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-RR-Shoup/1762652580.050983 | 1762652580.050983 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-RR-Shoup | bunnycore/Phi-4-RR-Shoup | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6586579165503088}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Stock-RP | 69724e46-4038-4d3a-a8ff-e84a56bba9e8 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Stock-RP/1762652580.0521228 | 1762652580.0521228 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Stock-RP | bunnycore/Phi-4-Stock-RP | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6399231816025922}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Sce-exp-v0.1 | c8de0acd-7cce-45c0-9032-2b717f3917b8 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Sce-exp-v0.1/1762652580.0516632 | 1762652580.0516639 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Sce-exp-v0.1 | bunnycore/Phi-4-Sce-exp-v0.1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6595322632836429}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-ReasoningRP | 5db77608-f892-4ac4-93c4-03f177696484 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-ReasoningRP/1762652580.05142 | 1762652580.051421 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-ReasoningRP | bunnycore/Phi-4-ReasoningRP | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6736204382150472}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Trim-Exp1 | c13c2fd7-e271-4935-a3a6-4161cb8e4ea2 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Trim-Exp1/1762652580.052348 | 1762652580.052348 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Trim-Exp1 | bunnycore/Phi-4-Trim-Exp1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12192538021338936}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.503} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Model-Stock-v4 | 92363115-37f2-4d2f-8178-61fc98c8f337 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Model-Stock-v4/1762652580.0505521 | 1762652580.050553 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Model-Stock-v4 | bunnycore/Phi-4-Model-Stock-v4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7110145524984818}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-3.5-mini-TitanFusion-0.1 | 60823e05-59e3-4c4c-a23e-8ef495aa39be | 0.0.1 | hfopenllm_v2/bunnycore_Phi-3.5-mini-TitanFusion-0.1/1762652580.04916 | 1762652580.049161 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-3.5-mini-TitanFusion-0.1 | bunnycore/Phi-3.5-mini-TitanFusion-0.1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5227950726295119}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-RStock-v0.1 | cf300641-1ec3-4ee7-b38d-b274ebc23ff2 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-RStock-v0.1/1762652580.051188 | 1762652580.051189 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-RStock-v0.1 | bunnycore/Phi-4-RStock-v0.1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7018721436898541}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Stock-Ex | bc007572-56ff-449a-9e3d-5ab770c3ae44 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Stock-Ex/1762652580.051897 | 1762652580.051897 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Stock-Ex | bunnycore/Phi-4-Stock-Ex | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6574588757829227}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Model-Stock-v2 | 5bc6e404-5798-4d19-88d1-5a8153947227 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Model-Stock-v2/1762652580.050115 | 1762652580.050116 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Model-Stock-v2 | bunnycore/Phi-4-Model-Stock-v2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.63752510006782}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH",... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Model-Stock-v3 | 5832ef9b-bd14-46ba-b04d-049280bc5267 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Model-Stock-v3/1762652580.050334 | 1762652580.050335 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Model-Stock-v3 | bunnycore/Phi-4-Model-Stock-v3 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5911636679565775}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-RP-v0 | 29135c1b-e6a0-428a-ba4f-459e9b652d25 | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-RP-v0/1762652580.050766 | 1762652580.0507672 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-RP-v0 | bunnycore/Phi-4-RP-v0 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6827129793392643}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-4-Model-Stock | cee9b876-96b3-4429-af70-6a5b45747a3b | 0.0.1 | hfopenllm_v2/bunnycore_Phi-4-Model-Stock/1762652580.0497222 | 1762652580.049727 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-4-Model-Stock | bunnycore/Phi-4-Model-Stock | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6878837041272712}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | bunnycore/Phi-Seek-4-Sce-V1 | 75810fb9-99b5-4707-80a8-8974bbb0844d | 0.0.1 | hfopenllm_v2/bunnycore_Phi-Seek-4-Sce-V1/1762652580.052572 | 1762652580.052573 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Phi-Seek-4-Sce-V1 | bunnycore/Phi-Seek-4-Sce-V1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.29348462080612775}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | mrm8488/phi-4-14B-grpo-limo | e671d26c-1d8a-4d22-b360-dc3e449886b8 | 0.0.1 | hfopenllm_v2/mrm8488_phi-4-14B-grpo-limo/1762652580.374649 | 1762652580.37465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrm8488/phi-4-14B-grpo-limo | mrm8488/phi-4-14B-grpo-limo | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.681239112222237}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | mrm8488/phi-4-14B-grpo-gsm8k-3e | 1bd4d2fe-cd83-4a79-b102-40be8ebb6245 | 0.0.1 | hfopenllm_v2/mrm8488_phi-4-14B-grpo-gsm8k-3e/1762652580.374398 | 1762652580.374399 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrm8488/phi-4-14B-grpo-gsm8k-3e | mrm8488/phi-4-14B-grpo-gsm8k-3e | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.688533092195375}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/Phi-3.5-MoE-instruct | ae57c3e7-4042-43eb-baa2-b033d1b4867c | 0.0.1 | hfopenllm_v2/microsoft_Phi-3.5-MoE-instruct/1762652580.356415 | 1762652580.356415 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3.5-MoE-instruct | microsoft/Phi-3.5-MoE-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.692454908531585}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 42.0} |
HF Open LLM v2 | microsoft | unsloth/phi-4 | c6080b92-d05a-4bda-ad07-e1b59a427844 | 0.0.1 | hfopenllm_v2/unsloth_phi-4/1762652580.579377 | 1762652580.579378 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | unsloth/phi-4 | unsloth/phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6882083981613231}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | unsloth/phi-4-unsloth-bnb-4bit | 3bdd8e19-fd61-4d1e-96b1-cdadd4c2d67f | 0.0.1 | hfopenllm_v2/unsloth_phi-4-unsloth-bnb-4bit/1762652580.579966 | 1762652580.579967 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | unsloth/phi-4-unsloth-bnb-4bit | unsloth/phi-4-unsloth-bnb-4bit | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6793906833867471}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.483} |
HF Open LLM v2 | microsoft | unsloth/phi-4-bnb-4bit | c8cfc527-9a58-45e7-a8e0-39caacd8bd58 | 0.0.1 | hfopenllm_v2/unsloth_phi-4-bnb-4bit/1762652580.579705 | 1762652580.579705 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | unsloth/phi-4-bnb-4bit | unsloth/phi-4-bnb-4bit | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6729710501469435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.058} |
HF Open LLM v2 | microsoft | BlackBeenie/Neos-Phi-3-14B-v0.1 | 6d6aa9c5-cb3f-4c30-bd1a-ba951c9ad0e8 | 0.0.1 | hfopenllm_v2/BlackBeenie_Neos-Phi-3-14B-v0.1/1762652579.4966102 | 1762652579.496611 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | BlackBeenie/Neos-Phi-3-14B-v0.1 | BlackBeenie/Neos-Phi-3-14B-v0.1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4022449323350931}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | microsoft | benhaotang/phi4-qwq-sky-t1 | 08f1ef63-efc7-449c-92cf-6f180b9d2712 | 0.0.1 | hfopenllm_v2/benhaotang_phi4-qwq-sky-t1/1762652580.030136 | 1762652580.030137 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | benhaotang/phi4-qwq-sky-t1 | benhaotang/phi4-qwq-sky-t1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04596249063595704}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/Phi-3-small-8k-instruct | f4c62b5d-fc1d-4421-9be8-e7e4af642284 | 0.0.1 | hfopenllm_v2/microsoft_Phi-3-small-8k-instruct/1762652580.356211 | 1762652580.356212 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/Phi-3-small-8k-instruct | microsoft/Phi-3-small-8k-instruct | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6496651107949131}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3SmallForCausalLM", "params_billions": 7.392} |
HF Open LLM v2 | microsoft | netcat420/MFANN-phigments-slerp-V3.2 | 8c4e85ce-7b8f-479c-a1dc-114c7e5ba4f1 | 0.0.1 | hfopenllm_v2/netcat420_MFANN-phigments-slerp-V3.2/1762652580.395236 | 1762652580.395236 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN-phigments-slerp-V3.2 | netcat420/MFANN-phigments-slerp-V3.2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.35243598097492435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | netcat420/MFANN-phigments-slerp-V2 | 8b4f2ab4-dcd7-4c5d-9bd0-6d7e1580c123 | 0.0.1 | hfopenllm_v2/netcat420_MFANN-phigments-slerp-V2/1762652580.3950222 | 1762652580.395023 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN-phigments-slerp-V2 | netcat420/MFANN-phigments-slerp-V2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32316032571355113}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | netcat420/MFANN-abliterated-phi2-merge-unretrained | a3c07d22-20d1-4878-80d5-04b949580829 | 0.0.1 | hfopenllm_v2/netcat420_MFANN-abliterated-phi2-merge-unretrained/1762652580.3939252 | 1762652580.393926 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN-abliterated-phi2-merge-unretrained | netcat420/MFANN-abliterated-phi2-merge-unretrained | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3005037744296245}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.775} |
HF Open LLM v2 | microsoft | netcat420/MFANN-phigments-slerp-V3.3 | b3466ac6-df1f-4440-9d7b-7991cac7d733 | 0.0.1 | hfopenllm_v2/netcat420_MFANN-phigments-slerp-V3.3/1762652580.395446 | 1762652580.395447 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | netcat420/MFANN-phigments-slerp-V3.3 | netcat420/MFANN-phigments-slerp-V3.3 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36909732842192056}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | microsoft/phi-1_5 | 0bc55439-f6a1-4588-858a-082907876d6e | 0.0.1 | hfopenllm_v2/microsoft_phi-1_5/1762652580.357298 | 1762652580.357298 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/phi-1_5 | microsoft/phi-1_5 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2032839532440591}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 1.418} |
HF Open LLM v2 | microsoft | microsoft/phi-4 | 5481936f-d52a-486b-871e-d2e48c1b0278 | 0.0.1 | hfopenllm_v2/microsoft_phi-4/1762652580.357901 | 1762652580.357902 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/phi-4 | microsoft/phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0585269307659233}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/phi-4 | f3ee4f04-22f1-4ddb-afb2-27b8f641042b | 0.0.1 | hfopenllm_v2/microsoft_phi-4/1762652580.3577 | 1762652580.357701 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/phi-4 | microsoft/phi-4 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.048785001573602486}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "float16", "architecture": "Phi3ForCausalLM", "params_billions": 14.66} |
HF Open LLM v2 | microsoft | microsoft/phi-2 | e38ef3e4-585f-46de-beb4-c794d767b579 | 0.0.1 | hfopenllm_v2/microsoft_phi-2/1762652580.357496 | 1762652580.357497 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/phi-2 | microsoft/phi-2 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.273875539125077}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "PhiForCausalLM", "params_billions": 2.78} |
HF Open LLM v2 | microsoft | microsoft/phi-1 | b88d579f-6bc7-4aee-a117-28786cba3300 | 0.0.1 | hfopenllm_v2/microsoft_phi-1/1762652580.357049 | 1762652580.3570502 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/phi-1 | microsoft/phi-1 | microsoft | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20680571993421898}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "PhiForCausalLM", "params_billions": 1.418} |
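Each row's evaluation_results column is a serialized JSON array of per-benchmark records (truncated in the excerpts above), and additional_details is a small JSON object with precision, architecture, and parameter count. The sketch below shows one way to unpack evaluation_results into a name-to-score mapping. It assumes only the schema visible in the truncated excerpts; the sample record is illustrative, not a verbatim row from the dataset.

```python
import json

# Illustrative stand-in for one row's evaluation_results string
# (assumption: the full field is a JSON array of objects shaped
# like the truncated excerpts above; this record is hypothetical).
sample_evaluation_results = json.dumps([
    {
        "evaluation_name": "IFEval",
        "metric_config": {
            "evaluation_description": "Accuracy on IFEval",
            "lower_is_better": False,
            "score_type": "continuous",
            "min_score": 0,
            "max_score": 1,
        },
        "score_details": {"score": 0.688},
    }
])

def extract_scores(evaluation_results: str) -> dict[str, float]:
    """Map each evaluation_name to its score for one leaderboard row."""
    return {
        entry["evaluation_name"]: entry["score_details"]["score"]
        for entry in json.loads(evaluation_results)
    }

print(extract_scores(sample_evaluation_results))  # {'IFEval': 0.688}
```

Since the scores are stored per benchmark rather than as flat columns, a mapping like this is a convenient intermediate form before comparing models, e.g. sorting rows by their IFEval score.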