| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | meta | cluebbers/Llama-3.1-8B-paraphrase-type-generation-apty-sigmoid | f7aec62a-004e-4034-b4d9-152452bb519a | 0.0.1 | hfopenllm_v2/cluebbers_Llama-3.1-8B-paraphrase-type-generation-apty-sigmoid/1762652580.110752 | 1762652580.110753 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cluebbers/Llama-3.1-8B-paraphrase-type-generation-apty-sigmoid | cluebbers/Llama-3.1-8B-paraphrase-type-generation-apty-sigmoid | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13184240038652995}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc | dbec72eb-bef2-4985-9ac6-bf5c6dabc25c | 0.0.1 | hfopenllm_v2/cluebbers_Llama-3.1-8B-paraphrase-type-generation-etpc/1762652580.1111748 | 1762652580.111176 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc | cluebbers/Llama-3.1-8B-paraphrase-type-generation-etpc | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12085156274241235}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | cloudyu/S1-Llama-3.2-3Bx4-MoE | 4cd18600-a389-4a22-88f8-0e35739665bb | 0.0.1 | hfopenllm_v2/cloudyu_S1-Llama-3.2-3Bx4-MoE/1762652580.103262 | 1762652580.103263 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cloudyu/S1-Llama-3.2-3Bx4-MoE | cloudyu/S1-Llama-3.2-3Bx4-MoE | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.530214275899059}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 9.555} |
HF Open LLM v2 | meta | cloudyu/Llama-3.2-3Bx4 | 0f4eaf10-0a2d-48e7-9c22-e1c771da16a0 | 0.0.1 | hfopenllm_v2/cloudyu_Llama-3.2-3Bx4/1762652580.102047 | 1762652580.102048 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cloudyu/Llama-3.2-3Bx4 | cloudyu/Llama-3.2-3Bx4 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5068584688626179}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 9.949} |
HF Open LLM v2 | meta | cloudyu/Llama-3-70Bx2-MOE | 8d0fa497-cdaa-4206-ae80-babed3089d43 | 0.0.1 | hfopenllm_v2/cloudyu_Llama-3-70Bx2-MOE/1762652580.10177 | 1762652580.101771 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cloudyu/Llama-3-70Bx2-MOE | cloudyu/Llama-3-70Bx2-MOE | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5482486469234964}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 126.926} |
HF Open LLM v2 | meta | ontocord/Llama_3.2_1b-autoredteam_helpfulness-train | 8277cf4f-865b-4b3e-afcb-b906064dfc20 | 0.0.1 | hfopenllm_v2/ontocord_Llama_3.2_1b-autoredteam_helpfulness-train/1762652580.417561 | 1762652580.417561 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ontocord/Llama_3.2_1b-autoredteam_helpfulness-train | ontocord/Llama_3.2_1b-autoredteam_helpfulness-train | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2765484470094904}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.498} |
HF Open LLM v2 | meta | IDEA-CCNL/Ziya-LLaMA-13B-v1 | 98616cce-563a-4977-b5c0-bf8df3102303 | 0.0.1 | hfopenllm_v2/IDEA-CCNL_Ziya-LLaMA-13B-v1/1762652579.645581 | 1762652579.645581 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | IDEA-CCNL/Ziya-LLaMA-13B-v1 | IDEA-CCNL/Ziya-LLaMA-13B-v1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16968643200042555}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 13.0} |
HF Open LLM v2 | meta | Xiaojian9992024/Llama3.2-1B-THREADRIPPER-v0.2 | 5ae4b63d-a84b-4468-aefe-8b5c7b88323e | 0.0.1 | hfopenllm_v2/Xiaojian9992024_Llama3.2-1B-THREADRIPPER-v0.2/1762652579.952687 | 1762652579.9526882 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Xiaojian9992024/Llama3.2-1B-THREADRIPPER-v0.2 | Xiaojian9992024/Llama3.2-1B-THREADRIPPER-v0.2 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5317878783849076}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | Xiaojian9992024/Llama3.2-1B-THREADRIPPER | b7c71bb9-0f3b-4d2f-8902-5fefac1629c5 | 0.0.1 | hfopenllm_v2/Xiaojian9992024_Llama3.2-1B-THREADRIPPER/1762652579.952322 | 1762652579.952322 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Xiaojian9992024/Llama3.2-1B-THREADRIPPER | Xiaojian9992024/Llama3.2-1B-THREADRIPPER | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5575916346405316}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | Triangle104/Hermes-Llama-3.2-CoT-Summary | 9bd6ca33-d62a-4327-a11e-f36188f0256a | 0.0.1 | hfopenllm_v2/Triangle104_Hermes-Llama-3.2-CoT-Summary/1762652579.925437 | 1762652579.925438 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Hermes-Llama-3.2-CoT-Summary | Triangle104/Hermes-Llama-3.2-CoT-Summary | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48302836473889277}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Triangle104/DS-R1-Llama-8B-Harmony | ef25dd23-7cc0-46ad-898d-31bfb5205aad | 0.0.1 | hfopenllm_v2/Triangle104_DS-R1-Llama-8B-Harmony/1762652579.9232068 | 1762652579.9232068 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/DS-R1-Llama-8B-Harmony | Triangle104/DS-R1-Llama-8B-Harmony | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.35663262366077564}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/DS-Distilled-Hermes-Llama-3.1_TIES | 9383604e-dd29-4c51-87eb-68f19ff929ec | 0.0.1 | hfopenllm_v2/Triangle104_DS-Distilled-Hermes-Llama-3.1_TIES/1762652579.922394 | 1762652579.922395 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/DS-Distilled-Hermes-Llama-3.1_TIES | Triangle104/DS-Distilled-Hermes-Llama-3.1_TIES | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13641360479084386}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/Llama3.1-cc-Lit-8b | 3ccecc91-6528-4592-8ca3-722a62bfa102 | 0.0.1 | hfopenllm_v2/Triangle104_Llama3.1-cc-Lit-8b/1762652579.927792 | 1762652579.9277928 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Llama3.1-cc-Lit-8b | Triangle104/Llama3.1-cc-Lit-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2993047336622384}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/Llama3.1-Allades-Lit-8b | d3d2f0cc-2775-4a01-b8ae-5206cafcb2bb | 0.0.1 | hfopenllm_v2/Triangle104_Llama3.1-Allades-Lit-8b/1762652579.927552 | 1762652579.927553 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Llama3.1-Allades-Lit-8b | Triangle104/Llama3.1-Allades-Lit-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24612361866514182}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/RomboHermes3-R1-Llama3.2-3b | 8ce06258-4909-4e46-a326-85052d28c5ff | 0.0.1 | hfopenllm_v2/Triangle104_RomboHermes3-R1-Llama3.2-3b/1762652579.9345112 | 1762652579.9345121 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/RomboHermes3-R1-Llama3.2-3b | Triangle104/RomboHermes3-R1-Llama3.2-3b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.300728733094855}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Triangle104/Porpoise-R1-Llama3.2-3b | 29843ea0-0ab4-44e1-8206-10a1135cce8a | 0.0.1 | hfopenllm_v2/Triangle104_Porpoise-R1-Llama3.2-3b/1762652579.931781 | 1762652579.931781 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Porpoise-R1-Llama3.2-3b | Triangle104/Porpoise-R1-Llama3.2-3b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4352174452674459}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Triangle104/Hermes-Llama-3.2-CoT | ddacf85a-a333-4cf9-b0f2-b9a5d5831b8c | 0.0.1 | hfopenllm_v2/Triangle104_Hermes-Llama-3.2-CoT/1762652579.925184 | 1762652579.925184 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Hermes-Llama-3.2-CoT | Triangle104/Hermes-Llama-3.2-CoT | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4177571066991139}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Triangle104/DS-Distilled-Hermes-Llama-3.1 | d8a0873b-58e8-449a-aedd-7117e9931546 | 0.0.1 | hfopenllm_v2/Triangle104_DS-Distilled-Hermes-Llama-3.1/1762652579.9221509 | 1762652579.922152 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/DS-Distilled-Hermes-Llama-3.1 | Triangle104/DS-Distilled-Hermes-Llama-3.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3229353670483207}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/DSR1-Distill-Llama-Lit-8B | b31d5098-4324-4307-aa50-2413ceba5481 | 0.0.1 | hfopenllm_v2/Triangle104_DSR1-Distill-Llama-Lit-8B/1762652579.923411 | 1762652579.923412 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/DSR1-Distill-Llama-Lit-8B | Triangle104/DSR1-Distill-Llama-Lit-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18852090231696345}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Triangle104/Dolphin3-Llama3.2-Smart | 88532e60-eff6-404b-8e74-fd8836a99ff9 | 0.0.1 | hfopenllm_v2/Triangle104_Dolphin3-Llama3.2-Smart/1762652579.924712 | 1762652579.924713 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Triangle104/Dolphin3-Llama3.2-Smart | Triangle104/Dolphin3-Llama3.2-Smart | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.413660199382084}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | Danielbrdz/Barcenas-Llama3-8b-ORPO | 83f9e48d-919e-42ec-8ea4-cc933a1b98f5 | 0.0.1 | hfopenllm_v2/Danielbrdz_Barcenas-Llama3-8b-ORPO/1762652579.534392 | 1762652579.534392 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Danielbrdz/Barcenas-Llama3-8b-ORPO | Danielbrdz/Barcenas-Llama3-8b-ORPO | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.737242738156979}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | SaisExperiments/RightSheep-Llama3.2-3B | 4ef7907b-270f-45dc-8f18-88c62c1c8bfe | 0.0.1 | hfopenllm_v2/SaisExperiments_RightSheep-Llama3.2-3B/1762652579.8563251 | 1762652579.8563259 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SaisExperiments/RightSheep-Llama3.2-3B | SaisExperiments/RightSheep-Llama3.2-3B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4156338515139829}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | togethercomputer/LLaMA-2-7B-32K | 29dae40d-4786-4fbc-92fa-3415b0c35488 | 0.0.1 | hfopenllm_v2/togethercomputer_LLaMA-2-7B-32K/1762652580.574694 | 1762652580.5746949 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/LLaMA-2-7B-32K | togethercomputer/LLaMA-2-7B-32K | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18649738250065384}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | meta | Etherll/Herplete-LLM-Llama-3.1-8b-Ties | febdde9e-8e67-458b-be79-6a9c91a7237a | 0.0.1 | hfopenllm_v2/Etherll_Herplete-LLM-Llama-3.1-8b-Ties/1762652579.614388 | 1762652579.614389 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Herplete-LLM-Llama-3.1-8b-Ties | Etherll/Herplete-LLM-Llama-3.1-8b-Ties | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6163679038285084}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Etherll/Herplete-LLM-Llama-3.1-8b | 3d70d2d7-1510-45de-93dc-1ba93cb0f24a | 0.0.1 | hfopenllm_v2/Etherll_Herplete-LLM-Llama-3.1-8b/1762652579.614203 | 1762652579.614203 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Herplete-LLM-Llama-3.1-8b | Etherll/Herplete-LLM-Llama-3.1-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6105976586568084}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Etherll/Herplete-LLM-Llama-3.1-8b | 52e6e50e-4621-491f-9e46-8d6d398c4344 | 0.0.1 | hfopenllm_v2/Etherll_Herplete-LLM-Llama-3.1-8b/1762652579.613958 | 1762652579.6139588 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Herplete-LLM-Llama-3.1-8b | Etherll/Herplete-LLM-Llama-3.1-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46719149634082013}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Etherll/Replete-LLM-V3-Llama-3.1-8b | 66846c9d-e2bc-416d-95b4-fed31d1b781b | 0.0.1 | hfopenllm_v2/Etherll_Replete-LLM-V3-Llama-3.1-8b/1762652579.6150668 | 1762652579.615068 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Replete-LLM-V3-Llama-3.1-8b | Etherll/Replete-LLM-V3-Llama-3.1-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5262924595628488}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | abacusai/Llama-3-Smaug-8B | ea57e277-5694-4981-ac47-d2fa633847ca | 0.0.1 | hfopenllm_v2/abacusai_Llama-3-Smaug-8B/1762652579.9700851 | 1762652579.9700859 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Llama-3-Smaug-8B | abacusai/Llama-3-Smaug-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48667535472546175}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | cognitivecomputations/Dolphin3.0-Llama3.2-1B | 0aecb893-2b9b-4cfd-bf97-b9887b0aa539 | 0.0.1 | hfopenllm_v2/cognitivecomputations_Dolphin3.0-Llama3.2-1B/1762652580.112042 | 1762652580.112046 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/Dolphin3.0-Llama3.2-1B | cognitivecomputations/Dolphin3.0-Llama3.2-1B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5427787160290252}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | cognitivecomputations/dolphin-2.9.4-llama3.1-8b | d7da3f99-b538-4b33-a3dc-b2e4a96d3f89 | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.4-llama3.1-8b/1762652580.1160939 | 1762652580.116095 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.4-llama3.1-8b | cognitivecomputations/dolphin-2.9.4-llama3.1-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27572396796056686}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | cognitivecomputations/Dolphin3.0-Llama3.1-8B | fa439482-ca9c-49c3-9732-1147c3965c56 | 0.0.1 | hfopenllm_v2/cognitivecomputations_Dolphin3.0-Llama3.1-8B/1762652580.111501 | 1762652580.1115022 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/Dolphin3.0-Llama3.1-8B | cognitivecomputations/Dolphin3.0-Llama3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7621222799948582}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | cognitivecomputations/dolphin-2.9.1-llama-3-70b | 7c975279-f21e-418b-bc0b-739a933b91dc | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9.1-llama-3-70b/1762652580.113282 | 1762652580.1132832 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9.1-llama-3-70b | cognitivecomputations/dolphin-2.9.1-llama-3-70b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3760167466765959}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | cognitivecomputations/dolphin-2.9-llama3-8b | d985b9ab-a760-4a50-973e-6985e778b97d | 0.0.1 | hfopenllm_v2/cognitivecomputations_dolphin-2.9-llama3-8b/1762652580.113044 | 1762652580.113045 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | cognitivecomputations/dolphin-2.9-llama3-8b | cognitivecomputations/dolphin-2.9-llama3-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38503393218881454}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | agentlans/Llama3.1-8B-drill | 869f9850-417b-43d7-bb40-61375a8bb09c | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-8B-drill/1762652579.976306 | 1762652579.976307 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-8B-drill | agentlans/Llama3.1-8B-drill | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.765169749597734}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | agentlans/Llama3.1-Daredevilish | 417b2c35-090e-42c3-8a92-04f7258702a3 | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-Daredevilish/1762652579.976594 | 1762652579.976595 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-Daredevilish | agentlans/Llama3.1-Daredevilish | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6291573026237051}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | agentlans/Llama3.1-LexiHermes-SuperStorm | 6f966179-a456-4914-807d-45ab507e0388 | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-LexiHermes-SuperStorm/1762652579.97705 | 1762652579.9770508 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-LexiHermes-SuperStorm | agentlans/Llama3.1-LexiHermes-SuperStorm | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7834545672149895}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | agentlans/Llama3.1-SuperDeepFuse-CrashCourse12K | 455bd496-7a32-45c9-a792-3982781fdc16 | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-SuperDeepFuse-CrashCourse12K/1762652579.977621 | 1762652579.977621 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-SuperDeepFuse-CrashCourse12K | agentlans/Llama3.1-SuperDeepFuse-CrashCourse12K | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.718732961874493}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | agentlans/Llama3.1-SuperDeepFuse | 6301252b-2353-438a-9e60-c6a572adfc5f | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-SuperDeepFuse/1762652579.977348 | 1762652579.97735 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-SuperDeepFuse | agentlans/Llama3.1-SuperDeepFuse | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7761605872418517}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | collaiborateorg/Collaiborator-MEDLLM-Llama-3-8B-v2 | 55eeee3c-b812-4359-ab5f-4e3fa976648f | 0.0.1 | hfopenllm_v2/collaiborateorg_Collaiborator-MEDLLM-Llama-3-8B-v2/1762652580.116315 | 1762652580.116315 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | collaiborateorg/Collaiborator-MEDLLM-Llama-3-8B-v2 | collaiborateorg/Collaiborator-MEDLLM-Llama-3-8B-v2 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.380887157187374}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | NousResearch/Nous-Hermes-llama-2-7b | 6ab36d53-da10-4f80-bd1b-dc037a020362 | 0.0.1 | hfopenllm_v2/NousResearch_Nous-Hermes-llama-2-7b/1762652579.792065 | 1762652579.792066 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Nous-Hermes-llama-2-7b | NousResearch/Nous-Hermes-llama-2-7b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17290788441335658}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | meta | NousResearch/Hermes-2-Theta-Llama-3-8B | 99c4b14f-8ea6-4f6e-af65-1e2ee58eeca9 | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-2-Theta-Llama-3-8B/1762652579.79036 | 1762652579.79036 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-2-Theta-Llama-3-8B | NousResearch/Hermes-2-Theta-Llama-3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6517883659800441}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | NousResearch/Hermes-3-Llama-3.1-70B | e48bd1d8-1082-4b79-8145-87d7f013fb82 | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-3-Llama-3.1-70B/1762652579.7905731 | 1762652579.7905731 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-3-Llama-3.1-70B | NousResearch/Hermes-3-Llama-3.1-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7661438316998896}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | NousResearch/Hermes-3-Llama-3.2-3B | 7e5f7bc1-1f9a-497a-a903-7d612bb923ca | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-3-Llama-3.2-3B/1762652579.790994 | 1762652579.790995 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-3-Llama-3.2-3B | NousResearch/Hermes-3-Llama-3.2-3B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3824862476008103}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | NousResearch/Yarn-Llama-2-7b-128k | e3e717a5-a987-4e94-a528-9aafadb6774f | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Llama-2-7b-128k/1762652579.792481 | 1762652579.7924821 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Llama-2-7b-128k | NousResearch/Yarn-Llama-2-7b-128k | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14847825990593846}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | meta | NousResearch/Hermes-3-Llama-3.1-8B | b9300d76-c854-48a2-a900-b661c1fae7bf | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-3-Llama-3.1-8B/1762652579.790786 | 1762652579.790787 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-3-Llama-3.1-8B | NousResearch/Hermes-3-Llama-3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6170172918966121}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | NousResearch/Yarn-Llama-2-13b-128k | e067537a-a621-483f-b1cf-ee78f57a39da | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Llama-2-13b-128k/1762652579.792277 | 1762652579.792278 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Llama-2-13b-128k | NousResearch/Yarn-Llama-2-13b-128k | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16546430138698653}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 13.0} |
HF Open LLM v2 | meta | NousResearch/Hermes-2-Pro-Llama-3-8B | af47ca72-b9b5-4cf3-84a7-e2f4602e6eaa | 0.0.1 | hfopenllm_v2/NousResearch_Hermes-2-Pro-Llama-3-8B/1762652579.78989 | 1762652579.789891 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Hermes-2-Pro-Llama-3-8B | NousResearch/Hermes-2-Pro-Llama-3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5361839918084017}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.031} |
HF Open LLM v2 | meta | NousResearch/Yarn-Llama-2-7b-64k | 50db2b1d-e0b5-43b1-86e2-5fa55fb3a960 | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Llama-2-7b-64k/1762652579.7927492 | 1762652579.792753 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Llama-2-7b-64k | NousResearch/Yarn-Llama-2-7b-64k | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1699856381068897}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | meta | bosonai/Higgs-Llama-3-70B | ebac2d72-ef36-43a7-83de-e28ae3eb4b22 | 0.0.1 | hfopenllm_v2/bosonai_Higgs-Llama-3-70B/1762652580.035682 | 1762652580.035682 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bosonai/Higgs-Llama-3-70B | bosonai/Higgs-Llama-3-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5560678998390935}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | prithivMLmods/Llama-Deepsync-1B | 5516c5d6-29c9-46dc-ae29-61876fb488c2 | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-Deepsync-1B/1762652580.4655502 | 1762652580.4655511 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-Deepsync-1B | prithivMLmods/Llama-Deepsync-1B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3570071853792382}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | prithivMLmods/Llama-8B-Distill-CoT | 6b1d1057-0091-4e44-822f-f7c1e5dc3ce9 | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-8B-Distill-CoT/1762652580.465258 | 1762652580.465258 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-8B-Distill-CoT | prithivMLmods/Llama-8B-Distill-CoT | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3341511633576688}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | prithivMLmods/Llama-3.2-6B-AlgoCode | 914b588e-6da8-4a08-9313-ac7004fd8b97 | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-3.2-6B-AlgoCode/1762652580.465046 | 1762652580.465046 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-3.2-6B-AlgoCode | prithivMLmods/Llama-3.2-6B-AlgoCode | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21357553513566227}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.339} |
HF Open LLM v2 | meta | prithivMLmods/Llama-Deepsync-3B | fbdcf318-d1b5-4ed6-b13d-efb14dfaf09f | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-Deepsync-3B/1762652580.465787 | 1762652580.465788 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-Deepsync-3B | prithivMLmods/Llama-Deepsync-3B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4302218114602588}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | prithivMLmods/Llama-3.2-3B-Math-Oct | 5ab1b41f-ee87-475c-b48b-e154c580d560 | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-3.2-3B-Math-Oct/1762652580.464829 | 1762652580.46483 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-3.2-3B-Math-Oct | prithivMLmods/Llama-3.2-3B-Math-Oct | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4585233846194763}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | prithivMLmods/Deepthink-Llama-3-8B-Preview | 020f77a1-1051-4f85-8037-ed4f8b12474a | 0.0.1 | hfopenllm_v2/prithivMLmods_Deepthink-Llama-3-8B-Preview/1762652580.459939 | 1762652580.459939 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Deepthink-Llama-3-8B-Preview | prithivMLmods/Deepthink-Llama-3-8B-Preview | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.29553252037926037}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | prithivMLmods/Llama-Express.1-Math | 99fd40d7-8d26-4088-ba03-1c1d7ed11ca0 | 0.0.1 | hfopenllm_v2/prithivMLmods_Llama-Express.1-Math/1762652580.466016 | 1762652580.466017 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | prithivMLmods/Llama-Express.1-Math | prithivMLmods/Llama-Express.1-Math | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5084320713484665}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-34800 | 23dca426-d0d9-43d0-86ff-50e01cc292d0 | 0.0.1 | hfopenllm_v2/meditsolutions_Llama-3.2-SUN-2.4B-checkpoint-34800/1762652580.343692 | 1762652580.343693 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-34800 | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-34800 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25009530268576263}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 2.209} |
HF Open LLM v2 | meta | meditsolutions/Llama-3.1-MedIT-SUN-8B | 94d286c8-8356-4bdd-ac91-2ce517b6b974 | 0.0.1 | hfopenllm_v2/meditsolutions_Llama-3.1-MedIT-SUN-8B/1762652580.342782 | 1762652580.342783 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meditsolutions/Llama-3.1-MedIT-SUN-8B | meditsolutions/Llama-3.1-MedIT-SUN-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7837293935646308}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-26000 | 85ccad14-a4eb-41c8-b1b7-f2d0215c358a | 0.0.1 | hfopenllm_v2/meditsolutions_Llama-3.2-SUN-2.4B-checkpoint-26000/1762652580.3434849 | 1762652580.343486 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-26000 | meditsolutions/Llama-3.2-SUN-2.4B-checkpoint-26000 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28139447776344545}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 2.209} |
HF Open LLM v2 | meta | meditsolutions/Llama-3.2-SUN-2.4B-v1.0.0 | bba22496-6f3a-4ddb-8a69-5995e72aa15f | 0.0.1 | hfopenllm_v2/meditsolutions_Llama-3.2-SUN-2.4B-v1.0.0/1762652580.343897 | 1762652580.343898 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meditsolutions/Llama-3.2-SUN-2.4B-v1.0.0 | meditsolutions/Llama-3.2-SUN-2.4B-v1.0.0 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5636865738462834}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 2.472} |
HF Open LLM v2 | meta | HPAI-BSC/Llama3.1-Aloe-Beta-8B | d7410909-8a7c-4afb-9cab-2537f837a9a1 | 0.0.1 | hfopenllm_v2/HPAI-BSC_Llama3.1-Aloe-Beta-8B/1762652579.636478 | 1762652579.636513 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HPAI-BSC/Llama3.1-Aloe-Beta-8B | HPAI-BSC/Llama3.1-Aloe-Beta-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7253276860951166}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | HPAI-BSC/Llama3-Aloe-8B-Alpha | 10d1f626-64f0-4f43-9597-1221cf94c948 | 0.0.1 | hfopenllm_v2/HPAI-BSC_Llama3-Aloe-8B-Alpha/1762652579.6361432 | 1762652579.6361442 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HPAI-BSC/Llama3-Aloe-8B-Alpha | HPAI-BSC/Llama3-Aloe-8B-Alpha | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5081073773144147}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | vicgalle/Roleplay-Llama-3-8B | 89bafcc1-b175-45ec-b365-45938c1e8f33 | 0.0.1 | hfopenllm_v2/vicgalle_Roleplay-Llama-3-8B/1762652580.5885959 | 1762652580.588597 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | vicgalle/Roleplay-Llama-3-8B | vicgalle/Roleplay-Llama-3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7320221456845614}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | vicgalle/Humanish-RP-Llama-3.1-8B | 3b0e49aa-931b-4625-8e59-fed02b31372e | 0.0.1 | hfopenllm_v2/vicgalle_Humanish-RP-Llama-3.1-8B/1762652580.587956 | 1762652580.587957 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | vicgalle/Humanish-RP-Llama-3.1-8B | vicgalle/Humanish-RP-Llama-3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6669259786256023}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B | 469379ff-5526-44f4-be9b-8bf6185b917e | 0.0.1 | hfopenllm_v2/vicgalle_Configurable-Hermes-2-Pro-Llama-3-8B/1762652580.5867279 | 1762652580.586729 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B | vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5762510139762497}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.031} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch3 | 6d191a68-8817-468a-850b-01f5ba76e05f | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-3B-lora-epoch3/1762652579.887351 | 1762652579.8873532 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch3 | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5331121424487028}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.1-8B-lora | fffe8411-9f9c-48ce-adb5-8d483022bffe | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.1-8B-lora/1762652579.88546 | 1762652579.885461 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.1-8B-lora | SkyOrbis/SKY-Ko-Llama3.1-8B-lora | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5058345190760515}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch3 | f45610c5-ead3-4670-9639-aa30fb145829 | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-1B-lora-v2-epoch3/1762652579.886383 | 1762652579.886384 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch3 | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4359920566319587}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch1 | 08fdfb9e-7998-4483-bb1a-4ea7f0e2980e | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-3B-lora-epoch1/1762652579.886793 | 1762652579.886794 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch1 | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5331121424487028}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch5 | 34a1eda3-2a02-4522-955a-7ed3f1ee97d6 | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-1B-lora-v2-epoch5/1762652579.8865862 | 1762652579.886587 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch5 | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-v2-epoch5 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42467652495378927}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch5 | 19c08486-99c5-4f53-a6cc-69cb58e0808a | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-1B-lora-epoch5/1762652579.8861618 | 1762652579.886163 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch5 | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch5 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4359920566319587}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch2 | 37a5a439-e2ac-46ec-af94-b60f127157de | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-3B-lora-epoch2/1762652579.887009 | 1762652579.88701 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch2 | SkyOrbis/SKY-Ko-Llama3.2-3B-lora-epoch2 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5331121424487028}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.1-8B-lora-epoch1 | da7be2d8-96ff-4902-9628-c1781391c68e | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.1-8B-lora-epoch1/1762652579.8857 | 1762652579.8857012 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.1-8B-lora-epoch1 | SkyOrbis/SKY-Ko-Llama3.1-8B-lora-epoch1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5058345190760515}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch3 | d0e4c608-0c64-4cf4-aee6-714475d500db | 0.0.1 | hfopenllm_v2/SkyOrbis_SKY-Ko-Llama3.2-1B-lora-epoch3/1762652579.8859022 | 1762652579.8859022 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch3 | SkyOrbis/SKY-Ko-Llama3.2-1B-lora-epoch3 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3247084402718121}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | meta | dfurman/Llama-3-70B-Orpo-v0.1 | 854d263a-00cc-488a-83eb-c69bb74da5b5 | 0.0.1 | hfopenllm_v2/dfurman_Llama-3-70B-Orpo-v0.1/1762652580.124833 | 1762652580.124834 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dfurman/Llama-3-70B-Orpo-v0.1 | dfurman/Llama-3-70B-Orpo-v0.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20490742341431845}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | dfurman/Llama-3-8B-Orpo-v0.1 | 10047fc1-254f-406c-807c-3274d9780550 | 0.0.1 | hfopenllm_v2/dfurman_Llama-3-8B-Orpo-v0.1/1762652580.125153 | 1762652580.125154 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dfurman/Llama-3-8B-Orpo-v0.1 | dfurman/Llama-3-8B-Orpo-v0.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28351773294857646}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "?", "params_billions": 8.03} |
HF Open LLM v2 | meta | dfurman/Llama-3-8B-Orpo-v0.1 | 0a6a3c2b-c0f5-44c7-9ac2-e278a303197e | 0.0.1 | hfopenllm_v2/dfurman_Llama-3-8B-Orpo-v0.1/1762652580.1253839 | 1762652580.125385 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dfurman/Llama-3-8B-Orpo-v0.1 | dfurman/Llama-3-8B-Orpo-v0.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3000039894147528}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | princeton-nlp/Sheared-LLaMA-1.3B | 578905fb-a4a6-4dcd-9b09-ff5289568b91 | 0.0.1 | hfopenllm_v2/princeton-nlp_Sheared-LLaMA-1.3B/1762652580.4538639 | 1762652580.453865 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Sheared-LLaMA-1.3B | princeton-nlp/Sheared-LLaMA-1.3B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2197702097102355}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.3} |
HF Open LLM v2 | meta | princeton-nlp/Llama-3-8B-ProLong-64k-Base | 171a1779-0f17-4514-96ae-e4f9acea86b4 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-8B-ProLong-64k-Base/1762652580.443676 | 1762652580.443677 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-8B-ProLong-64k-Base | princeton-nlp/Llama-3-8B-ProLong-64k-Base | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5200722970606879}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | princeton-nlp/Sheared-LLaMA-2.7B | 3a0252c3-ced9-4cb4-94ef-d3800ac15ff9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Sheared-LLaMA-2.7B/1762652580.4540951 | 1762652580.4540958 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Sheared-LLaMA-2.7B | princeton-nlp/Sheared-LLaMA-2.7B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24165214962964932}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 2.7} |
HF Open LLM v2 | meta | princeton-nlp/Llama-3-8B-ProLong-512k-Base | 6c3d4b07-14c5-4218-862f-2aca386f5144 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-8B-ProLong-512k-Base/1762652580.442863 | 1762652580.4428642 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-8B-ProLong-512k-Base | princeton-nlp/Llama-3-8B-ProLong-512k-Base | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5322123077877808}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Hastagaras/Zabuza-8B-Llama-3.1 | fb698ce2-d422-46eb-aa98-17fb7645461a | 0.0.1 | hfopenllm_v2/Hastagaras_Zabuza-8B-Llama-3.1/1762652579.638141 | 1762652579.6381419 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Hastagaras/Zabuza-8B-Llama-3.1 | Hastagaras/Zabuza-8B-Llama-3.1 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6265342624237025}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Hastagaras/Llama-3.1-Jamet-8B-MK.I | be7d90fa-86be-4f3b-a3ef-2e1475b7bd64 | 0.0.1 | hfopenllm_v2/Hastagaras_Llama-3.1-Jamet-8B-MK.I/1762652579.637886 | 1762652579.637887 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Hastagaras/Llama-3.1-Jamet-8B-MK.I | Hastagaras/Llama-3.1-Jamet-8B-MK.I | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7338207068356406}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | UKzExecution/LlamaExecutor-8B-3.0.5 | 0f2ddff5-6077-4166-8fe4-ade89d3a6003 | 0.0.1 | hfopenllm_v2/UKzExecution_LlamaExecutor-8B-3.0.5/1762652579.938387 | 1762652579.938387 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | UKzExecution/LlamaExecutor-8B-3.0.5 | UKzExecution/LlamaExecutor-8B-3.0.5 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.740290207759855}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | MagusCorp/grpo_lora_enem_llama3_7b | 22c931f2-cf99-46b1-b4f8-50db5a172a66 | 0.0.1 | hfopenllm_v2/MagusCorp_grpo_lora_enem_llama3_7b/1762652579.745377 | 1762652579.745378 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MagusCorp/grpo_lora_enem_llama3_7b | MagusCorp/grpo_lora_enem_llama3_7b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4723622211288271}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | lemon07r/llama-3-NeuralMahou-8b | 13b8357d-225e-4ba0-bf34-45479a562532 | 0.0.1 | hfopenllm_v2/lemon07r_llama-3-NeuralMahou-8b/1762652580.319005 | 1762652580.319006 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | lemon07r/llama-3-NeuralMahou-8b | lemon07r/llama-3-NeuralMahou-8b | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49009738604680025}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | lemon07r/Llama-3-RedMagic4-8B | 22ae03c6-dd4f-4263-a005-624dae701da3 | 0.0.1 | hfopenllm_v2/lemon07r_Llama-3-RedMagic4-8B/1762652580.318728 | 1762652580.318729 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | lemon07r/Llama-3-RedMagic4-8B | lemon07r/Llama-3-RedMagic4-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4864005283758206}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | bfuzzy1/acheron-m1a-llama | da59bcfb-1f9a-41e5-9a8c-14f672dce595 | 0.0.1 | hfopenllm_v2/bfuzzy1_acheron-m1a-llama/1762652580.0322502 | 1762652580.032251 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bfuzzy1/acheron-m1a-llama | bfuzzy1/acheron-m1a-llama | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.11245827737070972}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 0.514} |
HF Open LLM v2 | meta | rombodawg/rombos_Replete-Coder-Llama3-8B | af3522f6-e26f-491f-8ccc-df064e5d3010 | 0.0.1 | hfopenllm_v2/rombodawg_rombos_Replete-Coder-Llama3-8B/1762652580.5000498 | 1762652580.500051 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | rombodawg/rombos_Replete-Coder-Llama3-8B | rombodawg/rombos_Replete-Coder-Llama3-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4714125187834945}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | aloobun/Meta-Llama-3-7B-28Layers | f020ec4e-f026-4034-a219-1aacfcbb16b0 | 0.0.1 | hfopenllm_v2/aloobun_Meta-Llama-3-7B-28Layers/1762652580.0090299 | 1762652580.0090308 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | aloobun/Meta-Llama-3-7B-28Layers | aloobun/Meta-Llama-3-7B-28Layers | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19636453498938372}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 7.158} |
HF Open LLM v2 | meta | Locutusque/Hercules-6.1-Llama-3.1-8B | 267ac6ef-168e-489b-a7cc-0ff448b0acbf | 0.0.1 | hfopenllm_v2/Locutusque_Hercules-6.1-Llama-3.1-8B/1762652579.735234 | 1762652579.735234 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Locutusque/Hercules-6.1-Llama-3.1-8B | Locutusque/Hercules-6.1-Llama-3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6006806384836678}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Locutusque/Hercules-6.0-Llama-3.1-8B | 2084dde6-b1e3-457b-9854-ace18cc5d943 | 0.0.1 | hfopenllm_v2/Locutusque_Hercules-6.0-Llama-3.1-8B/1762652579.734967 | 1762652579.734968 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Locutusque/Hercules-6.0-Llama-3.1-8B | Locutusque/Hercules-6.0-Llama-3.1-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6630041622893922}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Locutusque/Llama-3-Yggdrasil-2.0-8B | 478f0d4e-41e5-41c7-b9da-07db69c1d561 | 0.0.1 | hfopenllm_v2/Locutusque_Llama-3-Yggdrasil-2.0-8B/1762652579.7359009 | 1762652579.735904 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Locutusque/Llama-3-Yggdrasil-2.0-8B | Locutusque/Llama-3-Yggdrasil-2.0-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5370583385417359}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | Locutusque/Llama-3-NeuralHercules-5.0-8B | 0c540f58-808b-42fc-b4b9-346367742f70 | 0.0.1 | hfopenllm_v2/Locutusque_Llama-3-NeuralHercules-5.0-8B/1762652579.735453 | 1762652579.735453 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Locutusque/Llama-3-NeuralHercules-5.0-8B | Locutusque/Llama-3-NeuralHercules-5.0-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4489310584803876}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | EnnoAi/EnnoAi-Pro-Llama-3.1-8B-v1.0 | 32c712e0-4f63-4188-b4c8-5f37b6101e3f | 0.0.1 | hfopenllm_v2/EnnoAi_EnnoAi-Pro-Llama-3.1-8B-v1.0/1762652579.596818 | 1762652579.596819 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EnnoAi/EnnoAi-Pro-Llama-3.1-8B-v1.0 | EnnoAi/EnnoAi-Pro-Llama-3.1-8B-v1.0 | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4704384366813389}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | meta | JackFram/llama-68m | 3b05e3fd-4bf0-42a3-8dc5-13292ece8c77 | 0.0.1 | hfopenllm_v2/JackFram_llama-68m/1762652579.650121 | 1762652579.650121 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | JackFram/llama-68m | JackFram/llama-68m | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17263416623448008}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 0.068} |
HF Open LLM v2 | meta | JackFram/llama-160m | 11a0fc6d-5370-456e-8c01-5d7ed19e4b59 | 0.0.1 | hfopenllm_v2/JackFram_llama-160m/1762652579.649858 | 1762652579.649858 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | JackFram/llama-160m | JackFram/llama-160m | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1791036671586945}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 0.162} |
HF Open LLM v2 | meta | aaditya/Llama3-OpenBioLLM-70B | e68ae3f7-3f46-43bb-8e14-0523af96998e | 0.0.1 | hfopenllm_v2/aaditya_Llama3-OpenBioLLM-70B/1762652579.969287 | 1762652579.9692879 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | aaditya/Llama3-OpenBioLLM-70B | aaditya/Llama3-OpenBioLLM-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7596743307756753}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.0} |
HF Open LLM v2 | meta | m42-health/Llama3-Med42-70B | 36ebe051-2bac-46cb-b990-33025df0ccac | 0.0.1 | hfopenllm_v2/m42-health_Llama3-Med42-70B/1762652580.328667 | 1762652580.328667 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | m42-health/Llama3-Med42-70B | m42-health/Llama3-Med42-70B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6291074349392944}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | meta | DevQuasar/DevQuasar-R1-Uncensored-Llama-8B | 490df557-2f50-434a-a28d-a78a234da9fa | 0.0.1 | hfopenllm_v2/DevQuasar_DevQuasar-R1-Uncensored-Llama-8B/1762652579.555449 | 1762652579.5554502 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DevQuasar/DevQuasar-R1-Uncensored-Llama-8B | DevQuasar/DevQuasar-R1-Uncensored-Llama-8B | meta | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38488432913558246}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
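Each row's `evaluation_results` column is a JSON list of per-benchmark entries (`evaluation_name`, `metric_config`, `score_details`), truncated for display above. The following is a minimal sketch, not part of the dataset, showing how one such field parses once expanded; the sample reproduces only the IFEval entry visible in the DevQuasar/DevQuasar-R1-Uncensored-Llama-8B row, and the helper name `scores_by_benchmark` is hypothetical, though the field names follow the schema used throughout this dump.

```python
import json

# Hypothetical sample: the IFEval entry from the row above, re-serialized.
# Field names mirror the schema visible in the dump; the full field would
# contain one such entry per benchmark (IFEval, BBH, ...).
sample_evaluation_results = json.dumps([
    {
        "evaluation_name": "IFEval",
        "metric_config": {
            "evaluation_description": "Accuracy on IFEval",
            "lower_is_better": False,
            "score_type": "continuous",
            "min_score": 0,
            "max_score": 1,
        },
        "score_details": {"score": 0.38488432913558246},
    }
])

def scores_by_benchmark(evaluation_results: str) -> dict:
    """Map each evaluation_name in the JSON field to its scalar score."""
    return {
        entry["evaluation_name"]: entry["score_details"]["score"]
        for entry in json.loads(evaluation_results)
    }

print(scores_by_benchmark(sample_evaluation_results))
# {'IFEval': 0.38488432913558246}
```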