| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | meta | mlabonne/OrpoLlama-3-8B | b8b5b30e-d259-49ae-8155-7f63ddae88c8 | 0.0.1 | hfopenllm_v2/mlabonne_OrpoLlama-3-8B/1762652580.369958 | 1762652580.3699589 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mlabonne/OrpoLlama-3-8B | mlabonne/OrpoLlama-3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36527524745453177}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | mlabonne/ChimeraLlama-3-8B-v3 | eef221de-8dc3-410a-943d-900c810948ae | 0.0.1 | hfopenllm_v2/mlabonne_ChimeraLlama-3-8B-v3/1762652580.3683012 | 1762652580.3683012 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mlabonne/ChimeraLlama-3-8B-v3 | mlabonne/ChimeraLlama-3-8B-v3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44078821970150317}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | mlabonne/Hermes-3-Llama-3.1-70B-lorablated | 07190707-16fb-47fc-9813-4f2408a04bdb | 0.0.1 | hfopenllm_v2/mlabonne_Hermes-3-Llama-3.1-70B-lorablated/1762652580.368906 | 1762652580.368906 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mlabonne/Hermes-3-Llama-3.1-70B-lorablated | mlabonne/Hermes-3-Llama-3.1-70B-lorablated | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34244360518978534}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | sabersalehk/Llama3-SimPO | b88f3d13-a8ed-4e23-86ec-1531c3151f0f | 0.0.1 | hfopenllm_v2/sabersalehk_Llama3-SimPO/1762652580.505101 | 1762652580.5051022 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sabersalehk/Llama3-SimPO | sabersalehk/Llama3-SimPO | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36420142998355476}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | sabersalehk/Llama3-001-300 | f73009ad-891e-41e7-a6bc-a271894f5511 | 0.0.1 | hfopenllm_v2/sabersalehk_Llama3-001-300/1762652580.504826 | 1762652580.504826 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sabersalehk/Llama3-001-300 | sabersalehk/Llama3-001-300 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3178643776291351}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | sabersalehk/Llama3_001_200 | f673b2f9-8b77-42a3-9066-29f21a1ca0f8 | 0.0.1 | hfopenllm_v2/sabersalehk_Llama3_001_200/1762652580.505313 | 1762652580.505314 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sabersalehk/Llama3_001_200 | sabersalehk/Llama3_001_200 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.321836061649756}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | sabersalehk/Llama3_01_300 | 55ae7ee9-2c50-45d6-ac0e-7c07bbad9a00 | 0.0.1 | hfopenllm_v2/sabersalehk_Llama3_01_300/1762652580.505522 | 1762652580.505523 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sabersalehk/Llama3_01_300 | sabersalehk/Llama3_01_300 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2958827023408999}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | qingy2019/LLaMa_3.2_3B_Catalysts | 2fb27531-96ee-48d2-9416-43ef790d7196 | 0.0.1 | hfopenllm_v2/qingy2019_LLaMa_3.2_3B_Catalysts/1762652580.4818308 | 1762652580.481832 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | qingy2019/LLaMa_3.2_3B_Catalysts | qingy2019/LLaMa_3.2_3B_Catalysts | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.499239794855428}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | meta | qingy2019/OpenMath2-Llama3.1-8B | 75da6225-cc30-480c-b33e-359648932d9d | 0.0.1 | hfopenllm_v2/qingy2019_OpenMath2-Llama3.1-8B/1762652580.482083 | 1762652580.482084 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | qingy2019/OpenMath2-Llama3.1-8B | qingy2019/OpenMath2-Llama3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23305939352030391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | meta | GenVRadmin/llama38bGenZ_Vikas-Merged | 22a01298-038f-4069-b847-43409d2d4baa | 0.0.1 | hfopenllm_v2/GenVRadmin_llama38bGenZ_Vikas-Merged/1762652579.627924 | 1762652579.627925 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GenVRadmin/llama38bGenZ_Vikas-Merged | GenVRadmin/llama38bGenZ_Vikas-Merged | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30002947734234053}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | NotASI/FineTome-v1.5-Llama3.2-3B-1007 | d8a359e5-2899-4d3f-9fb4-3120f61951f4 | 0.0.1 | hfopenllm_v2/NotASI_FineTome-v1.5-Llama3.2-3B-1007/1762652579.789401 | 1762652579.789401 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NotASI/FineTome-v1.5-Llama3.2-3B-1007 | NotASI/FineTome-v1.5-Llama3.2-3B-1007 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5507719517546776}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | NotASI/FineTome-Llama3.2-3B-1002 | e701f5dc-d604-4bbb-8e92-37d69781ae5f | 0.0.1 | hfopenllm_v2/NotASI_FineTome-Llama3.2-3B-1002/1762652579.788946 | 1762652579.7889469 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NotASI/FineTome-Llama3.2-3B-1002 | NotASI/FineTome-Llama3.2-3B-1002 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5474496558021605}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | meta | NotASI/FineTome-Llama3.2-1B-0929 | 2346a7eb-2148-49f3-b960-363ba6b776d4 | 0.0.1 | hfopenllm_v2/NotASI_FineTome-Llama3.2-1B-0929/1762652579.788707 | 1762652579.7887082 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NotASI/FineTome-Llama3.2-1B-0929 | NotASI/FineTome-Llama3.2-1B-0929 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39907223943580805}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | NotASI/FineTome-v1.5-Llama3.2-1B-1007 | 8c67c634-82f0-4bb8-bd70-e98902649d96 | 0.0.1 | hfopenllm_v2/NotASI_FineTome-v1.5-Llama3.2-1B-1007/1762652579.789186 | 1762652579.789187 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NotASI/FineTome-v1.5-Llama3.2-1B-1007 | NotASI/FineTome-v1.5-Llama3.2-1B-1007 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39237777984636324}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | ehristoforu/mllama-3.1-8b-it | c4fa1166-5255-4b95-8c7b-e1f93265f126 | 0.0.1 | hfopenllm_v2/ehristoforu_mllama-3.1-8b-it/1762652580.143829 | 1762652580.14383 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ehristoforu/mllama-3.1-8b-it | ehristoforu/mllama-3.1-8b-it | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38788193105404767}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | ehristoforu/HappyLlama1 | 07a29c73-e3f4-4f01-b105-ac1ef2fdff43 | 0.0.1 | hfopenllm_v2/ehristoforu_HappyLlama1/1762652580.139553 | 1762652580.139554 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ehristoforu/HappyLlama1 | ehristoforu/HappyLlama1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7362686560548235}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Gryphe/Pantheon-RP-1.0-8b-Llama-3 | a3abb802-acd8-49c7-bcff-3b79a4023d96 | 0.0.1 | hfopenllm_v2/Gryphe_Pantheon-RP-1.0-8b-Llama-3/1762652579.633556 | 1762652579.633556 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Gryphe/Pantheon-RP-1.0-8b-Llama-3 | Gryphe/Pantheon-RP-1.0-8b-Llama-3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39325212657969744}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | LEESM/llama-3-8b-bnb-4b-kowiki231101 | 5f540be5-6932-41f4-b588-b88f8cfb89c7 | 0.0.1 | hfopenllm_v2/LEESM_llama-3-8b-bnb-4b-kowiki231101/1762652579.704602 | 1762652579.704603 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LEESM/llama-3-8b-bnb-4b-kowiki231101 | LEESM/llama-3-8b-bnb-4b-kowiki231101 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16848739123303944}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | LEESM/llama-2-7b-hf-lora-oki10p | 9fb11511-0c66-495a-b634-da6bb0934706 | 0.0.1 | hfopenllm_v2/LEESM_llama-2-7b-hf-lora-oki10p/1762652579.704393 | 1762652579.704394 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LEESM/llama-2-7b-hf-lora-oki10p | LEESM/llama-2-7b-hf-lora-oki10p | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22701432199896276}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | meta | LEESM/llama-3-Korean-Bllossom-8B-trexlab-oki10p | 629b8df0-6ce3-4230-baf7-45b3944bf0d5 | 0.0.1 | hfopenllm_v2/LEESM_llama-3-Korean-Bllossom-8B-trexlab-oki10p/1762652579.7048151 | 1762652579.704816 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LEESM/llama-3-Korean-Bllossom-8B-trexlab-oki10p | LEESM/llama-3-Korean-Bllossom-8B-trexlab-oki10p | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21372513818889433}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | LEESM/llama-2-7b-hf-lora-oki100p | 13881952-9fe3-4308-93d5-912e59465d6e | 0.0.1 | hfopenllm_v2/LEESM_llama-2-7b-hf-lora-oki100p/1762652579.704138 | 1762652579.704139 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LEESM/llama-2-7b-hf-lora-oki100p | LEESM/llama-2-7b-hf-lora-oki100p | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25129434345314877}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | meta | mindw96/DeepSeek-llama3.3-Bllossom-8B-DACON-LLM3 | ce85152e-fdde-406a-9818-0eb945ff1d6a | 0.0.1 | hfopenllm_v2/mindw96_DeepSeek-llama3.3-Bllossom-8B-DACON-LLM3/1762652580.360158 | 1762652580.360159 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mindw96/DeepSeek-llama3.3-Bllossom-8B-DACON-LLM3 | mindw96/DeepSeek-llama3.3-Bllossom-8B-DACON-LLM3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13881168632561602}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | KingNish/Reasoning-Llama-3b-v0.1 | 5f6f312f-3131-417d-b12e-3e30bb998d27 | 0.0.1 | hfopenllm_v2/KingNish_Reasoning-Llama-3b-v0.1/1762652579.69997 | 1762652579.699971 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | KingNish/Reasoning-Llama-3b-v0.1 | KingNish/Reasoning-Llama-3b-v0.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6224628430342602}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Tarek07/Progenitor-V1.1-LLaMa-70B | 8638b115-f092-42f1-949d-162321fe5833 | 0.0.1 | hfopenllm_v2/Tarek07_Progenitor-V1.1-LLaMa-70B/1762652579.911703 | 1762652579.911703 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tarek07/Progenitor-V1.1-LLaMa-70B | Tarek07/Progenitor-V1.1-LLaMa-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6906064796960952}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | Tarek07/Thalassic-Alpha-LLaMa-70B | a20052ae-dfa0-4df7-a9a6-f182dbef513d | 0.0.1 | hfopenllm_v2/Tarek07_Thalassic-Alpha-LLaMa-70B/1762652579.9119601 | 1762652579.911961 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tarek07/Thalassic-Alpha-LLaMa-70B | Tarek07/Thalassic-Alpha-LLaMa-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7003484088884161}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | Weyaxi/Einstein-v8-Llama3.2-1B | 5edf6193-a8d6-41d3-b2fd-20f7ce537770 | 0.0.1 | hfopenllm_v2/Weyaxi_Einstein-v8-Llama3.2-1B/1762652579.9499211 | 1762652579.949922 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Einstein-v8-Llama3.2-1B | Weyaxi/Einstein-v8-Llama3.2-1B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18622255615101263}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | Weyaxi/Einstein-v6.1-developed-by-Weyaxi-Llama3-8B | 06985382-8aec-4aa3-85ff-774da25ed2d3 | 0.0.1 | hfopenllm_v2/Weyaxi_Einstein-v6.1-developed-by-Weyaxi-Llama3-8B/1762652579.9492018 | 1762652579.949203 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Einstein-v6.1-developed-by-Weyaxi-Llama3-8B | Weyaxi/Einstein-v6.1-developed-by-Weyaxi-Llama3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39270247388041507}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Weyaxi/Einstein-v6.1-Llama3-8B | 13c07664-1ff1-48a4-a43d-877fc05bd19d | 0.0.1 | hfopenllm_v2/Weyaxi_Einstein-v6.1-Llama3-8B/1762652579.9489238 | 1762652579.948925 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Einstein-v6.1-Llama3-8B | Weyaxi/Einstein-v6.1-Llama3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4568245588372186}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Nekochu/Llama-3.1-8B-German-ORPO | 83da2d8f-542c-4d21-88f9-b83f4e960579 | 0.0.1 | hfopenllm_v2/Nekochu_Llama-3.1-8B-German-ORPO/1762652579.7705338 | 1762652579.7705338 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Nekochu/Llama-3.1-8B-German-ORPO | Nekochu/Llama-3.1-8B-German-ORPO | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4610710692074806}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | mattshumer/Reflection-Llama-3.1-70B | 155f55e9-34e3-4753-a783-31df44e791e0 | 0.0.1 | hfopenllm_v2/mattshumer_Reflection-Llama-3.1-70B/1762652580.341989 | 1762652580.341989 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mattshumer/Reflection-Llama-3.1-70B | mattshumer/Reflection-Llama-3.1-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.00452133671990319}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | argilla-warehouse/Llama-3.1-8B-MagPie-Ultra | 4e4260dc-81e0-4e2f-a7ce-dd6a0f7e0796 | 0.0.1 | hfopenllm_v2/argilla-warehouse_Llama-3.1-8B-MagPie-Ultra/1762652580.018188 | 1762652580.018189 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | argilla-warehouse/Llama-3.1-8B-MagPie-Ultra | argilla-warehouse/Llama-3.1-8B-MagPie-Ultra | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5756514935925566}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | keeeeenw/MicroLlama | 7407c2ed-23f5-4c92-b987-2c3a91147d98 | 0.0.1 | hfopenllm_v2/keeeeenw_MicroLlama/1762652580.3060532 | 1762652580.3060539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | keeeeenw/MicroLlama | keeeeenw/MicroLlama | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19853765785892544}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 0.305} |
HF Open LLM v2 | meta | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot-v1 | 0851ad0a-7f87-48c8-943a-198ad2ef8ea3 | 0.0.1 | hfopenllm_v2/3rd-Degree-Burn_Llama-3.1-8B-Squareroot-v1/1762652579.470921 | 1762652579.470922 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot-v1 | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot-v1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2892381104358657}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot | cbe8101a-f057-4151-9391-dbd883f4c09e | 0.0.1 | hfopenllm_v2/3rd-Degree-Burn_Llama-3.1-8B-Squareroot/1762652579.47045 | 1762652579.4704509 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot | 3rd-Degree-Burn/Llama-3.1-8B-Squareroot | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22134381219608418}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | 3rd-Degree-Burn/Llama-Squared-8B | fae2328b-af2f-49ff-a817-9406cf40c3d0 | 0.0.1 | hfopenllm_v2/3rd-Degree-Burn_Llama-Squared-8B/1762652579.471144 | 1762652579.471145 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 3rd-Degree-Burn/Llama-Squared-8B | 3rd-Degree-Burn/Llama-Squared-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27552449722292405}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter3 | 7a91746e-e622-4eef-aef8-5f0ba04f03c9 | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter3/1762652579.5453749 | 1762652579.545376 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter3 | DavieLion/Llama-3.2-1B-SPIN-iter3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1335910938531984}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter3 | 07d16051-fe48-46e6-a47c-806e9f95a92b | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter3/1762652579.54562 | 1762652579.545621 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter3 | DavieLion/Llama-3.2-1B-SPIN-iter3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1323920530858123}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter2 | 5723e611-e7e0-47c0-a5ac-162f22690d70 | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter2/1762652579.545113 | 1762652579.545114 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter2 | DavieLion/Llama-3.2-1B-SPIN-iter2 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13761264555822994}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter0 | a9771320-cc89-43fc-b398-7797505bc4e2 | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter0/1762652579.544659 | 1762652579.5446599 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter0 | DavieLion/Llama-3.2-1B-SPIN-iter0 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15492338107332987}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter0 | 62d01464-4163-432c-a017-bedf41cba649 | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter0/1762652579.5443351 | 1762652579.5443368 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter0 | DavieLion/Llama-3.2-1B-SPIN-iter0 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15067687070306784}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | DavieLion/Llama-3.2-1B-SPIN-iter1 | c380c4b0-7804-4b59-a7e4-700f0a7122b3 | 0.0.1 | hfopenllm_v2/DavieLion_Llama-3.2-1B-SPIN-iter1/1762652579.5448809 | 1762652579.5448818 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Llama-3.2-1B-SPIN-iter1 | DavieLion/Llama-3.2-1B-SPIN-iter1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15754642127333254}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | skumar9/Llama-medx_v2 | 1bfc4a7a-2ac8-4454-bbee-0db62608ce5a | 0.0.1 | hfopenllm_v2/skumar9_Llama-medx_v2/1762652580.517576 | 1762652580.517576 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | skumar9/Llama-medx_v2 | skumar9/Llama-medx_v2 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4462337708391512}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | ahmeda335/13_outOf_32_pruned_layers_llama3.1-8b | 54da4a97-6e12-4bb0-9138-dacd981b04bf | 0.0.1 | hfopenllm_v2/ahmeda335_13_outOf_32_pruned_layers_llama3.1-8b/1762652579.97824 | 1762652579.978241 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ahmeda335/13_outOf_32_pruned_layers_llama3.1-8b | ahmeda335/13_outOf_32_pruned_layers_llama3.1-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17480728910402177}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 5.195} |
HF Open LLM v2 | meta | nvidia/Llama-3.1-Minitron-4B-Depth-Base | 98402d5d-95a6-4f48-9745-8653b298b48e | 0.0.1 | hfopenllm_v2/nvidia_Llama-3.1-Minitron-4B-Depth-Base/1762652580.4147708 | 1762652580.414772 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/Llama-3.1-Minitron-4B-Depth-Base | nvidia/Llama-3.1-Minitron-4B-Depth-Base | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16069362624502986}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 4.02} |
HF Open LLM v2 | meta | nvidia/OpenMath2-Llama3.1-8B | 31c103fc-22ab-44a0-aeaf-769a9ff803df | 0.0.1 | hfopenllm_v2/nvidia_OpenMath2-Llama3.1-8B/1762652580.416384 | 1762652580.416384 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/OpenMath2-Llama3.1-8B | nvidia/OpenMath2-Llama3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23305939352030391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Daemontatox/Llama_cot | 01a0a741-5f78-4c31-a743-8e42ba73a22d | 0.0.1 | hfopenllm_v2/Daemontatox_Llama_cot/1762652579.527702 | 1762652579.527703 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Daemontatox/Llama_cot | Daemontatox/Llama_cot | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7548781677061308}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MllamaForConditionalGeneration", "params_billions": 10.67} |
HF Open LLM v2 | meta | Daemontatox/Llama3.3-70B-CogniLink | 20b46645-a1dd-4974-9ad1-444f8ca78481 | 0.0.1 | hfopenllm_v2/Daemontatox_Llama3.3-70B-CogniLink/1762652579.527427 | 1762652579.5274282 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Daemontatox/Llama3.3-70B-CogniLink | Daemontatox/Llama3.3-70B-CogniLink | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6931042965996888}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | Ba2han/Llama-Phi-3_DoRA | 99c4e277-7a0f-4c0c-ac19-25fe6b706a4a | 0.0.1 | hfopenllm_v2/Ba2han_Llama-Phi-3_DoRA/1762652579.4940102 | 1762652579.494011 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ba2han/Llama-Phi-3_DoRA | Ba2han/Llama-Phi-3_DoRA | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5130531434371911}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | meta | cpayne1303/llama-43m-beta | d987e61a-c7cc-4072-9e2c-faa6304eab65 | 0.0.1 | hfopenllm_v2/cpayne1303_llama-43m-beta/1762652580.117342 | 1762652580.117342 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cpayne1303/llama-43m-beta | cpayne1303/llama-43m-beta | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19489066787235645}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 0.043} |
HF Open LLM v2 | meta | cpayne1303/llama-43m-beta | d79e4774-159d-4b47-8cc0-64d7844e7bfc | 0.0.1 | hfopenllm_v2/cpayne1303_llama-43m-beta/1762652580.117069 | 1762652580.1170702 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cpayne1303/llama-43m-beta | cpayne1303/llama-43m-beta | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19156837191983936}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 0.043} |
HF Open LLM v2 | RDson | RDson/WomboCombo-R1-Coder-14B-Preview | faa623a7-1bf8-4da6-b381-7701f0446b70 | 0.0.1 | hfopenllm_v2/RDson_WomboCombo-R1-Coder-14B-Preview/1762652579.848609 | 1762652579.8486102 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | RDson/WomboCombo-R1-Coder-14B-Preview | RDson/WomboCombo-R1-Coder-14B-Preview | RDson | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.628557782240012}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | EpistemeAI2 | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code-math | 0115907a-a473-4f12-8f0b-5dafd729fc44 | 0.0.1 | hfopenllm_v2/EpistemeAI2_Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code-math/1762652579.61236 | 1762652579.612361 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code-math | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.003-128K-code-math | EpistemeAI2 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5515465631191904}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | EpistemeAI2 | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.005-128K-code-COT | 63b6d34d-1a59-40b6-b663-1d81544867f2 | 0.0.1 | hfopenllm_v2/EpistemeAI2_Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.005-128K-code-COT/1762652579.6125782 | 1762652579.612579 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.005-128K-code-COT | EpistemeAI2/Fireball-Meta-Llama-3.1-8B-Instruct-Agent-0.005-128K-code-COT | EpistemeAI2 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4633195476890207}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | EpistemeAI2 | EpistemeAI2/Fireball-12B-v1.2 | de86ca37-ffcb-41df-a0d1-68cb545ec1de | 0.0.1 | hfopenllm_v2/EpistemeAI2_Fireball-12B-v1.2/1762652579.609813 | 1762652579.609814 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Fireball-12B-v1.2 | EpistemeAI2/Fireball-12B-v1.2 | EpistemeAI2 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13553925805750963}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.0} |
HF Open LLM v2 | EpistemeAI2 | EpistemeAI2/Fireball-Alpaca-Llama3.1.06-8B-Philos-dpo | 7e03e547-5324-4c5d-b364-413014fad7eb | 0.0.1 | hfopenllm_v2/EpistemeAI2_Fireball-Alpaca-Llama3.1.06-8B-Philos-dpo/1762652579.610973 | 1762652579.6109738 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Fireball-Alpaca-Llama3.1.06-8B-Philos-dpo | EpistemeAI2/Fireball-Alpaca-Llama3.1.06-8B-Philos-dpo | EpistemeAI2 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4865756193566404}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | HarbingerX | HarbingerX/Zeitgeist-3b-V1 | 3bc34460-661d-404b-bb1c-5b2fe395b897 | 0.0.1 | hfopenllm_v2/HarbingerX_Zeitgeist-3b-V1/1762652579.637166 | 1762652579.6371672 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HarbingerX/Zeitgeist-3b-V1 | HarbingerX/Zeitgeist-3b-V1 | HarbingerX | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6711724889958643}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | HarbingerX | HarbingerX/Zeitgeist-3b-V1.2 | 37dad0cc-36d1-4a4c-8d9c-0f5246889a0c | 0.0.1 | hfopenllm_v2/HarbingerX_Zeitgeist-3b-V1.2/1762652579.6374269 | 1762652579.637428 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HarbingerX/Zeitgeist-3b-V1.2 | HarbingerX/Zeitgeist-3b-V1.2 | HarbingerX | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6754189993661264}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | Sao10K | Sao10K/Fimbulvetr-11B-v2 | 135ade7c-f0d1-495a-a5b5-c95712cf0c0f | 0.0.1 | hfopenllm_v2/Sao10K_Fimbulvetr-11B-v2/1762652579.872427 | 1762652579.872428 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/Fimbulvetr-11B-v2 | Sao10K/Fimbulvetr-11B-v2 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5100056738343152}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | Sao10K | Sao10K/L3-8B-Lunaris-v1 | e15ed4e3-d33f-4dad-98da-e1dad098a6a1 | 0.0.1 | hfopenllm_v2/Sao10K_L3-8B-Lunaris-v1/1762652579.8733618 | 1762652579.873365 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-8B-Lunaris-v1 | Sao10K/L3-8B-Lunaris-v1 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6894573066131198}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sao10K | Sao10K/L3-8B-Stheno-v3.3-32K | 279b82ae-62b2-4703-85f2-1e79e42366f0 | 0.0.1 | hfopenllm_v2/Sao10K_L3-8B-Stheno-v3.3-32K/1762652579.874314 | 1762652579.874315 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-8B-Stheno-v3.3-32K | Sao10K/L3-8B-Stheno-v3.3-32K | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46037181345496614}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sao10K | Sao10K/MN-12B-Lyra-v3 | 2c83813a-8254-4765-9367-efb9ad8c5e6c | 0.0.1 | hfopenllm_v2/Sao10K_MN-12B-Lyra-v3/1762652579.874634 | 1762652579.874634 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/MN-12B-Lyra-v3 | Sao10K/MN-12B-Lyra-v3 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4486063644463357}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Sao10K | Sao10K/L3-8B-Stheno-v3.2 | 85a94072-ac79-4c14-abaa-9a6424a03ab5 | 0.0.1 | hfopenllm_v2/Sao10K_L3-8B-Stheno-v3.2/1762652579.8740559 | 1762652579.874058 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-8B-Stheno-v3.2 | Sao10K/L3-8B-Stheno-v3.2 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6872841837435781}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sao10K | Sao10K/L3-8B-Niitama-v1 | 9c10e944-3955-4478-9d07-f79769d6b884 | 0.0.1 | hfopenllm_v2/Sao10K_L3-8B-Niitama-v1/1762652579.8737721 | 1762652579.873773 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-8B-Niitama-v1 | Sao10K/L3-8B-Niitama-v1 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6790659893526954}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sao10K | Sao10K/L3-70B-Euryale-v2.1 | d730a2be-1cd8-4851-9ecf-55139af1e8f7 | 0.0.1 | hfopenllm_v2/Sao10K_L3-70B-Euryale-v2.1/1762652579.872864 | 1762652579.872865 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-70B-Euryale-v2.1 | Sao10K/L3-70B-Euryale-v2.1 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7281003293483512}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | Sao10K | Sao10K/L3-70B-Euryale-v2.1 | 09aab7d9-93ac-4aff-840a-d4ccfb0b469d | 0.0.1 | hfopenllm_v2/Sao10K_L3-70B-Euryale-v2.1/1762652579.872639 | 1762652579.87264 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/L3-70B-Euryale-v2.1 | Sao10K/L3-70B-Euryale-v2.1 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7384417789243651}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | Sao10K | Sao10K/70B-L3.3-Cirrus-x1 | 660f8ede-1b7f-4438-8a97-51db77058725 | 0.0.1 | hfopenllm_v2/Sao10K_70B-L3.3-Cirrus-x1/1762652579.8721752 | 1762652579.8721762 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sao10K/70B-L3.3-Cirrus-x1 | Sao10K/70B-L3.3-Cirrus-x1 | Sao10K | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6680751517085777}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | zhengr | zhengr/MixTAO-7Bx2-MoE-v8.1 | 35068575-06a3-4541-bdf3-120bd6db2867 | 0.0.1 | hfopenllm_v2/zhengr_MixTAO-7Bx2-MoE-v8.1/1762652580.6327481 | 1762652580.632749 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zhengr/MixTAO-7Bx2-MoE-v8.1 | zhengr/MixTAO-7Bx2-MoE-v8.1 | zhengr | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4187810564856802}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 12.879} |
HF Open LLM v2 | tomasmcm | tomasmcm/sky-t1-coder-32b-flash | 1229310f-22aa-4ef9-b354-71fa249569f7 | 0.0.1 | hfopenllm_v2/tomasmcm_sky-t1-coder-32b-flash/1762652580.577295 | 1762652580.5772958 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tomasmcm/sky-t1-coder-32b-flash | tomasmcm/sky-t1-coder-32b-flash | tomasmcm | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7780090160773414}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 32.764} |
HF Open LLM v2 | Tremontaine | Tremontaine/L3-12B-Lunaris-v1 | 51e5f1f2-a43a-4ade-9207-1b15d172ba08 | 0.0.1 | hfopenllm_v2/Tremontaine_L3-12B-Lunaris-v1/1762652579.920848 | 1762652579.920848 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tremontaine/L3-12B-Lunaris-v1 | Tremontaine/L3-12B-Lunaris-v1 | Tremontaine | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6909311737301471}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 11.52} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.69-25.01-RP | 643da0d0-176a-40dd-b096-5aac8de827e9 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.69-25.01-RP/1762652580.219263 | 1762652580.219264 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.69-25.01-RP | icefog72/Ice0.69-25.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5437527981311808}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceDrunkenCherryRP-7b | 9d1e6b55-aa7c-4fea-8a77-92795c0ee60a | 0.0.1 | hfopenllm_v2/icefog72_IceDrunkenCherryRP-7b/1762652580.223197 | 1762652580.223207 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceDrunkenCherryRP-7b | icefog72/IceDrunkenCherryRP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4762585495374495}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.7-29.09-RP | 9c6cf7a1-1a17-4070-9ce3-633461334f42 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.7-29.09-RP/1762652580.2194638 | 1762652580.219465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.7-29.09-RP | icefog72/Ice0.7-29.09-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5175744801570943}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceSakeV6RP-7b | e9ebbcbf-81d5-494b-95a1-4e79feb42c40 | 0.0.1 | hfopenllm_v2/icefog72_IceSakeV6RP-7b/1762652580.224776 | 1762652580.224777 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceSakeV6RP-7b | icefog72/IceSakeV6RP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5032613465604596}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.68-25.01-RP | dd7cb16f-0752-4639-aa99-90b9be448295 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.68-25.01-RP/1762652580.2190669 | 1762652580.2190678 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.68-25.01-RP | icefog72/Ice0.68-25.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5513714721383707}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.29-06.11-RP | b07e3d05-409f-498a-a324-82c4a592d4dc | 0.0.1 | hfopenllm_v2/icefog72_Ice0.29-06.11-RP/1762652580.2119 | 1762652580.211901 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.29-06.11-RP | icefog72/Ice0.29-06.11-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.486050346414181}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceCoffeeRP-7b | bf5e2b11-79ce-49ed-947b-fb34110a3802 | 0.0.1 | hfopenllm_v2/icefog72_IceCoffeeRP-7b/1762652580.2220101 | 1762652580.2220109 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceCoffeeRP-7b | icefog72/IceCoffeeRP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4959174989029109}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.15-02.10-RP | 20c0d1f9-24b8-4993-82f1-d9889c18c56a | 0.0.1 | hfopenllm_v2/icefog72_Ice0.15-02.10-RP/1762652580.211034 | 1762652580.211034 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.15-02.10-RP | icefog72/Ice0.15-02.10-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5343355629729118}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.32-10.11-RP | 68e99fe4-634e-4462-b1db-d2d40814ff0b | 0.0.1 | hfopenllm_v2/icefog72_Ice0.32-10.11-RP/1762652580.2122939 | 1762652580.2122948 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.32-10.11-RP | icefog72/Ice0.32-10.11-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49154576523623983}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceMartiniRP-7b | 210bea5c-35de-4bd6-93db-871704add0d6 | 0.0.1 | hfopenllm_v2/icefog72_IceMartiniRP-7b/1762652580.223922 | 1762652580.223923 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceMartiniRP-7b | icefog72/IceMartiniRP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5044603873278457}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceLemonTeaRP-32k-7b | fd90b65b-7b6f-4ca2-93e3-59486c0ee070 | 0.0.1 | hfopenllm_v2/icefog72_IceLemonTeaRP-32k-7b/1762652580.2236779 | 1762652580.223679 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceLemonTeaRP-32k-7b | icefog72/IceLemonTeaRP-32k-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5212214701436633}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceSakeRP-7b | 67e351c8-6cca-4982-86e9-e774786c6862 | 0.0.1 | hfopenllm_v2/icefog72_IceSakeRP-7b/1762652580.2243059 | 1762652580.224307 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceSakeRP-7b | icefog72/IceSakeRP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5227950726295119}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.41-22.11-RP | 43a30cf0-ccb5-46ce-b520-55ee110002c9 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.41-22.11-RP/1762652580.213999 | 1762652580.2140002 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.41-22.11-RP | icefog72/Ice0.41-22.11-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4620451513096362}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.34n-14.11-RP | 8c6aae5b-6a9b-47fb-908b-6b51159cc9b2 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.34n-14.11-RP/1762652580.2127092 | 1762652580.21271 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.34n-14.11-RP | icefog72/Ice0.34n-14.11-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.47865663107222167}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.67-25.01-RP | cf0a4a2d-a104-43cf-ac01-66250e880ff0 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.67-25.01-RP/1762652580.21887 | 1762652580.218871 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.67-25.01-RP | icefog72/Ice0.67-25.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.536134124123991}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.50.1-16.01-RP | fde6323e-0bfe-4ec9-aa86-4371bbd1645a | 0.0.1 | hfopenllm_v2/icefog72_Ice0.50.1-16.01-RP/1762652580.214615 | 1762652580.214617 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.50.1-16.01-RP | icefog72/Ice0.50.1-16.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4829031414424837}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceDrinkNameNotFoundRP-7b-Model_Stock | f0e6fa5e-20c2-407d-8301-70d86cb1a51f | 0.0.1 | hfopenllm_v2/icefog72_IceDrinkNameNotFoundRP-7b-Model_Stock/1762652580.2227032 | 1762652580.2227042 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceDrinkNameNotFoundRP-7b-Model_Stock | icefog72/IceDrinkNameNotFoundRP-7b-Model_Stock | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5130032757527804}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceCocoaRP-7b | 5427828d-b53d-4e44-82ed-df6a9c0f9a47 | 0.0.1 | hfopenllm_v2/icefog72_IceCocoaRP-7b/1762652580.2217228 | 1762652580.2217238 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceCocoaRP-7b | icefog72/IceCocoaRP-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4962421929369628}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.53-16.01-RP | 6415adfc-35a9-480c-a740-dac02725c8f0 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.53-16.01-RP/1762652580.215963 | 1762652580.2159638 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.53-16.01-RP | icefog72/Ice0.53-16.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4741352943523185}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.60-18.01-RP | b5c42995-f1fe-4a7e-90c1-d8fb00cba116 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.60-18.01-RP/1762652580.217043 | 1762652580.2170439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.60-18.01-RP | icefog72/Ice0.60-18.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5374329002601985}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.57-17.01-RP | 8d99bf0e-7db0-46f5-96a0-7f977b8cf5f2 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.57-17.01-RP/1762652580.216822 | 1762652580.216822 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.57-17.01-RP | icefog72/Ice0.57-17.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5151763986223221}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.52-16.01-RP | 72412b78-cc3e-4652-9034-32c72aee5796 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.52-16.01-RP/1762652580.21541 | 1762652580.215412 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.52-16.01-RP | icefog72/Ice0.52-16.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4503051902285935}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.61-18.01-RP | 1c166a10-c176-42c7-9421-750e170f5706 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.61-18.01-RP/1762652580.2174668 | 1762652580.2174678 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.61-18.01-RP | icefog72/Ice0.61-18.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5441273598496433}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.64.1-24.01-RP | 359daeb1-3546-473f-801b-c9942fd010aa | 0.0.1 | hfopenllm_v2/icefog72_Ice0.64.1-24.01-RP/1762652580.218272 | 1762652580.218272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.64.1-24.01-RP | icefog72/Ice0.64.1-24.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5446770125489258}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/IceEspressoRPv2-7b | ade14c35-442b-4a0a-8345-99b7b58dc194 | 0.0.1 | hfopenllm_v2/icefog72_IceEspressoRPv2-7b/1762652580.223459 | 1762652580.2234602 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/IceEspressoRPv2-7b | icefog72/IceEspressoRPv2-7b | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4977160600539901}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.38-19.11-RP | 4d13aaf7-a18d-4bad-ab22-8e08c3f2e16a | 0.0.1 | hfopenllm_v2/icefog72_Ice0.38-19.11-RP/1762652580.213116 | 1762652580.213117 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.38-19.11-RP | icefog72/Ice0.38-19.11-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44033830237104216}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.55-17.01-RP | a2de66f0-bbd1-40b9-95d3-74e0335b853b | 0.0.1 | hfopenllm_v2/icefog72_Ice0.55-17.01-RP/1762652580.2165911 | 1762652580.2165918 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.55-17.01-RP | icefog72/Ice0.55-17.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.496067101956143}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.64-24.01-RP | d7313786-f553-454e-b2c8-62a0162c9339 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.64-24.01-RP/1762652580.218076 | 1762652580.218076 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.64-24.01-RP | icefog72/Ice0.64-24.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5440774921652327}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.50-16.01-RP | 37602e25-bd23-462a-8566-38f3b0fee63d | 0.0.1 | hfopenllm_v2/icefog72_Ice0.50-16.01-RP/1762652580.214273 | 1762652580.214274 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.50-16.01-RP | icefog72/Ice0.50-16.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43848987353555235}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.74-02.02-RP | 7470c7d4-80fe-4e88-a695-c628f9ed3682 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.74-02.02-RP/1762652580.220269 | 1762652580.2202702 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.74-02.02-RP | icefog72/Ice0.74-02.02-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2935344884905384}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | icefog72 | icefog72/Ice0.66-25.01-RP | b619dad2-fcb2-45ab-b603-ae1da3916eb7 | 0.0.1 | hfopenllm_v2/icefog72_Ice0.66-25.01-RP/1762652580.2186701 | 1762652580.2186701 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | icefog72/Ice0.66-25.01-RP | icefog72/Ice0.66-25.01-RP | icefog72 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.532487134137422}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
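Each row stores its scores in the `evaluation_results` column as a JSON list of per-benchmark objects (the rows above are truncated after the IFEval entry) and its model metadata in `additional_details` as a JSON object with precision, architecture, and parameter count. A minimal parsing sketch, assuming the two columns have been read into Python strings; the sample values are abridged from the icefog72/Ice0.66-25.01-RP row above, and only the stdlib `json` module is used:

```python
import json

# Abridged copies of the two JSON-bearing columns from a row above.
# Real rows carry one entry per benchmark (IFEval, BBH, ...); only the
# IFEval entry survives the truncation shown in this dump.
evaluation_results = """[
  {"evaluation_name": "IFEval",
   "metric_config": {"evaluation_description": "Accuracy on IFEval",
                     "lower_is_better": false,
                     "score_type": "continuous",
                     "min_score": 0, "max_score": 1},
   "score_details": {"score": 0.532487134137422}}
]"""
additional_details = """{"precision": "bfloat16",
                         "architecture": "MistralForCausalLM",
                         "params_billions": 7.242}"""

results = json.loads(evaluation_results)
details = json.loads(additional_details)

# Print one line per benchmark, tagged with the model metadata.
for entry in results:
    name = entry["evaluation_name"]
    score = entry["score_details"]["score"]
    print(f"{name}: {score:.4f} "
          f"({details['precision']}, {details['params_billions']}B params)")
```

The same loop works unchanged on untruncated rows, since every entry in `evaluation_results` follows the `evaluation_name` / `metric_config` / `score_details` shape shown in the schema.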