_leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | invalid-coder | invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp | cdb8a900-75f3-4e6b-9d35-5a6791e8acd1 | 0.0.1 | hfopenllm_v2/invalid-coder_Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp/1762652580.229043 | 1762652580.229047 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp | invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp | invalid-coder | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45547591501660034}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | Tsunami-th | Tsunami-th/Tsunami-1.0-14B-Instruct | 11262698-480b-425b-b013-f362fae2f254 | 0.0.1 | hfopenllm_v2/Tsunami-th_Tsunami-1.0-14B-Instruct/1762652579.935597 | 1762652579.935597 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tsunami-th/Tsunami-1.0-14B-Instruct | Tsunami-th/Tsunami-1.0-14B-Instruct | Tsunami-th | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7829049145157072}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | Tsunami-th | Tsunami-th/Tsunami-0.5x-7B-Instruct | fec678b9-c51b-4945-8d4f-f06af6528227 | 0.0.1 | hfopenllm_v2/Tsunami-th_Tsunami-0.5x-7B-Instruct/1762652579.9353971 | 1762652579.9353979 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tsunami-th/Tsunami-0.5x-7B-Instruct | Tsunami-th/Tsunami-0.5x-7B-Instruct | Tsunami-th | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.709915247099917}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Tsunami-th | Tsunami-th/Tsunami-1.0-7B-Instruct | ccffe03b-c166-48de-8516-8253b2c2f96e | 0.0.1 | hfopenllm_v2/Tsunami-th_Tsunami-1.0-7B-Instruct/1762652579.9358132 | 1762652579.9358132 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tsunami-th/Tsunami-1.0-7B-Instruct | Tsunami-th/Tsunami-1.0-7B-Instruct | Tsunami-th | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.730872972601586}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Tsunami-th | Tsunami-th/Tsunami-0.5-7B-Instruct | df3de449-9abc-4f0a-ba6e-caa48720893a | 0.0.1 | hfopenllm_v2/Tsunami-th_Tsunami-0.5-7B-Instruct/1762652579.935141 | 1762652579.9351418 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Tsunami-th/Tsunami-0.5-7B-Instruct | Tsunami-th/Tsunami-0.5-7B-Instruct | Tsunami-th | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7400153814102137}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | OEvortex | OEvortex/HelpingAI2.5-10B | 6a41fcba-f13d-4839-8a91-ff3f18de5114 | 0.0.1 | hfopenllm_v2/OEvortex_HelpingAI2.5-10B/1762652579.798051 | 1762652579.798051 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OEvortex/HelpingAI2.5-10B | OEvortex/HelpingAI2.5-10B | OEvortex | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32765617450586665}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 10.211} |
HF Open LLM v2 | OEvortex | OEvortex/HelpingAI-15B | 4ffdc303-b5e4-45f0-839c-432f04dc5d57 | 0.0.1 | hfopenllm_v2/OEvortex_HelpingAI-15B/1762652579.797408 | 1762652579.797409 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OEvortex/HelpingAI-15B | OEvortex/HelpingAI-15B | OEvortex | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2030091268944179}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 15.323} |
HF Open LLM v2 | OEvortex | OEvortex/HelpingAI-3B-reloaded | 628026b2-efc1-4592-a85b-f5d2ea1dc1dd | 0.0.1 | hfopenllm_v2/OEvortex_HelpingAI-3B-reloaded/1762652579.797647 | 1762652579.797647 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OEvortex/HelpingAI-3B-reloaded | OEvortex/HelpingAI-3B-reloaded | OEvortex | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46466819150963884}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 2.81} |
HF Open LLM v2 | OEvortex | OEvortex/HelpingAI2-9B | d04d6474-5784-4492-8347-a2bc03eca6ba | 0.0.1 | hfopenllm_v2/OEvortex_HelpingAI2-9B/1762652579.797843 | 1762652579.797844 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OEvortex/HelpingAI2-9B | OEvortex/HelpingAI2-9B | OEvortex | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44131238447319776}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.903} |
HF Open LLM v2 | Yuma42 | Yuma42/KangalKhan-RawRuby-7B | 4ad4a260-770a-4cce-9ba7-546cfa4cde58 | 0.0.1 | hfopenllm_v2/Yuma42_KangalKhan-RawRuby-7B/1762652579.9648829 | 1762652579.964884 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Yuma42/KangalKhan-RawRuby-7B | Yuma42/KangalKhan-RawRuby-7B | Yuma42 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.547674614467391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Omkar1102 | Omkar1102/code-yi | 3edef2ec-9fad-45ba-8fde-4af5c4f24d69 | 0.0.1 | hfopenllm_v2/Omkar1102_code-yi/1762652579.798722 | 1762652579.798723 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Omkar1102/code-yi | Omkar1102/code-yi | Omkar1102 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2254407195131141}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 2.084} |
HF Open LLM v2 | Omkar1102 | Omkar1102/code-yi | 2609af14-3cff-4b19-9741-e1caca56f58a | 0.0.1 | hfopenllm_v2/Omkar1102_code-yi/1762652579.79849 | 1762652579.7984908 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Omkar1102/code-yi | Omkar1102/code-yi | Omkar1102 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21477457590304835}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 2.084} |
HF Open LLM v2 | sabersaleh | sabersaleh/Llama2-7B-DPO | c2ffce0d-069d-48bb-989c-6fb18bdd9059 | 0.0.1 | hfopenllm_v2/sabersaleh_Llama2-7B-DPO/1762652580.50325 | 1762652580.503252 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sabersaleh/Llama2-7B-DPO | sabersaleh/Llama2-7B-DPO | sabersaleh | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14533105493424114}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | rmdhirr | rmdhirr/Gluon-8B | a1f5e06b-17f7-41d1-ab9d-c0e4b22d10cf | 0.0.1 | hfopenllm_v2/rmdhirr_Gluon-8B/1762652580.496151 | 1762652580.4961522 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | rmdhirr/Gluon-8B | rmdhirr/Gluon-8B | rmdhirr | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5052848663767692}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-slerp-Merge | 9574abe0-00e3-4e38-bda0-b217f002a480 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-slerp-Merge/1762652579.732816 | 1762652579.732817 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-slerp-Merge | LilRg/PRYMMAL-slerp-Merge | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.304400102838247}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-ECE-7B-SLERP-V5 | 150d0730-e194-4d2b-96e1-54f914b5fe28 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-ECE-7B-SLERP-V5/1762652579.7321632 | 1762652579.7321641 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-ECE-7B-SLERP-V5 | LilRg/PRYMMAL-ECE-7B-SLERP-V5 | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12492298213185458}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | LilRg | LilRg/10PRYMMAL-3B-slerp | e9371530-675d-48d1-9145-7ea15c893833 | 0.0.1 | hfopenllm_v2/LilRg_10PRYMMAL-3B-slerp/1762652579.7308428 | 1762652579.7308428 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/10PRYMMAL-3B-slerp | LilRg/10PRYMMAL-3B-slerp | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1945903535951276}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | LilRg | LilRg/ECE-1B-merge-PRYMMAL | 3fefac8e-d5aa-4998-ab60-6e3dcc49f77f | 0.0.1 | hfopenllm_v2/LilRg_ECE-1B-merge-PRYMMAL/1762652579.7310941 | 1762652579.731095 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/ECE-1B-merge-PRYMMAL | LilRg/ECE-1B-merge-PRYMMAL | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27122811916825135}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-ECE-7B-SLERP-V3 | a656eacf-8134-446c-8417-e1c3c54fe941 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-ECE-7B-SLERP-V3/1762652579.731744 | 1762652579.731745 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-ECE-7B-SLERP-V3 | LilRg/PRYMMAL-ECE-7B-SLERP-V3 | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12432346174816154}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-6B-slerp | 8fedde0a-96fe-4a6f-9e0f-87832cfd418e | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-6B-slerp/1762652579.731526 | 1762652579.7315269 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-6B-slerp | LilRg/PRYMMAL-6B-slerp | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.11533065599276586}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 3.293} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-ECE-7B-SLERP-V6 | b23913b9-f774-4927-be16-874d8e146218 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-ECE-7B-SLERP-V6/1762652579.732379 | 1762652579.732379 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-ECE-7B-SLERP-V6 | LilRg/PRYMMAL-ECE-7B-SLERP-V6 | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12432346174816154}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | LilRg | LilRg/ECE_Finetunning | f20fd926-d690-4fe2-80a4-3e79dc37f03f | 0.0.1 | hfopenllm_v2/LilRg_ECE_Finetunning/1762652579.731307 | 1762652579.731308 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/ECE_Finetunning | LilRg/ECE_Finetunning | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04453849120334047}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "?", "params_billions": 16.061} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-ECE-7B-SLERP-V7 | dd12d7df-9b32-4d2a-ae9a-40304cf4bfd7 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-ECE-7B-SLERP-V7/1762652579.732605 | 1762652579.732606 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-ECE-7B-SLERP-V7 | LilRg/PRYMMAL-ECE-7B-SLERP-V7 | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12492298213185458}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | LilRg | LilRg/PRYMMAL-ECE-7B-SLERP-V4 | 0d276bd3-a338-4383-88b0-9e653ae01387 | 0.0.1 | hfopenllm_v2/LilRg_PRYMMAL-ECE-7B-SLERP-V4/1762652579.731953 | 1762652579.7319539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LilRg/PRYMMAL-ECE-7B-SLERP-V4 | LilRg/PRYMMAL-ECE-7B-SLERP-V4 | LilRg | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12492298213185458}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | TeeZee | TeeZee/DoubleBagel-57B-v1.0 | 1315f2ad-2e39-4cab-b09a-c74d0779f895 | 0.0.1 | hfopenllm_v2/TeeZee_DoubleBagel-57B-v1.0/1762652579.9121659 | 1762652579.9121659 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TeeZee/DoubleBagel-57B-v1.0 | TeeZee/DoubleBagel-57B-v1.0 | TeeZee | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23363342597640924}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 56.703} |
HF Open LLM v2 | realtreetune | realtreetune/rho-1b-sft-MATH | 86234365-2d3e-4d49-96e8-8f034990c902 | 0.0.1 | hfopenllm_v2/realtreetune_rho-1b-sft-MATH/1762652580.4910588 | 1762652580.49106 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | realtreetune/rho-1b-sft-MATH | realtreetune/rho-1b-sft-MATH | realtreetune | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.212101668018635}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.1} |
HF Open LLM v2 | Aurel9 | Aurel9/testmerge-7b | eb45737a-74bc-482d-9d7f-d2bd1d876c77 | 0.0.1 | hfopenllm_v2/Aurel9_testmerge-7b/1762652579.485724 | 1762652579.485725 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Aurel9/testmerge-7b | Aurel9/testmerge-7b | Aurel9 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3979984219648311}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | c10x | c10x/Q-Pluse | 2093ba5f-d2f8-45d2-bcf7-ff48810c47af | 0.0.1 | hfopenllm_v2/c10x_Q-Pluse/1762652580.070795 | 1762652580.070796 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | c10x/Q-Pluse | c10x/Q-Pluse | c10x | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.11228318638988993}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | c10x | c10x/longthinker | fe7bd3bb-71a4-46dd-a86d-b5a24b685fa5 | 0.0.1 | hfopenllm_v2/c10x_longthinker/1762652580.078971 | 1762652580.078974 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | c10x/longthinker | c10x/longthinker | c10x | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36087913403103766}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Lyte | Lyte/Llama-3.2-1B-Instruct-COT-RL-Expriement1-EP04 | ea928079-f00f-41b1-a628-c1539b41e63d | 0.0.1 | hfopenllm_v2/Lyte_Llama-3.2-1B-Instruct-COT-RL-Expriement1-EP04/1762652579.7416818 | 1762652579.741683 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Lyte/Llama-3.2-1B-Instruct-COT-RL-Expriement1-EP04 | Lyte/Llama-3.2-1B-Instruct-COT-RL-Expriement1-EP04 | Lyte | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5773503193748144}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | Lyte | Lyte/Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3 | 8fdc62c0-215c-4502-8f56-188455fe2d9e | 0.0.1 | hfopenllm_v2/Lyte_Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3/1762652579.74142 | 1762652579.74142 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Lyte/Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3 | Lyte/Llama-3.1-8B-Instruct-Reasoner-1o1_v0.3 | Lyte | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7098155117310957}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | NeverSleep | NeverSleep/Lumimaid-v0.2-8B | 6d7f1ac9-66c8-4700-87a9-0e413fc8878e | 0.0.1 | hfopenllm_v2/NeverSleep_Lumimaid-v0.2-8B/1762652579.771939 | 1762652579.771939 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NeverSleep/Lumimaid-v0.2-8B | NeverSleep/Lumimaid-v0.2-8B | NeverSleep | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5038109992597419}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | NeverSleep | NeverSleep/Lumimaid-v0.2-12B | cee1293c-54fb-4275-b5a9-0215e5f9a4c0 | 0.0.1 | hfopenllm_v2/NeverSleep_Lumimaid-v0.2-12B/1762652579.771668 | 1762652579.771669 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NeverSleep/Lumimaid-v0.2-12B | NeverSleep/Lumimaid-v0.2-12B | NeverSleep | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.10993497253952846}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | DeepAutoAI | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.1 | a7ba1534-464f-45ba-834f-5f501b155c20 | 0.0.1 | hfopenllm_v2/DeepAutoAI_ldm_soup_Llama-3.1-8B-Instruct-v0.1/1762652579.550273 | 1762652579.5502741 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.1 | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.1 | DeepAutoAI | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7889499860370484}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | DeepAutoAI | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.0 | fb8eb882-26a9-4008-9226-90d44d38b54f | 0.0.1 | hfopenllm_v2/DeepAutoAI_ldm_soup_Llama-3.1-8B-Instruct-v0.0/1762652579.5500422 | 1762652579.5500429 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.0 | DeepAutoAI/ldm_soup_Llama-3.1-8B-Instruct-v0.0 | DeepAutoAI | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7889499860370484}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | DeepAutoAI | DeepAutoAI/d2nwg_Llama-3.1-8B-Instruct-v0.0 | d5d73b84-4436-47bf-967e-c9be94898189 | 0.0.1 | hfopenllm_v2/DeepAutoAI_d2nwg_Llama-3.1-8B-Instruct-v0.0/1762652579.548984 | 1762652579.548985 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/d2nwg_Llama-3.1-8B-Instruct-v0.0 | DeepAutoAI/d2nwg_Llama-3.1-8B-Instruct-v0.0 | DeepAutoAI | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7892746800711002}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | JungZoona | JungZoona/T3Q-Qwen2.5-14B-Instruct-1M-e3 | 464673ee-0238-40b4-9c15-1a1551b9f65c | 0.0.1 | hfopenllm_v2/JungZoona_T3Q-Qwen2.5-14B-Instruct-1M-e3/1762652579.696794 | 1762652579.696794 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | JungZoona/T3Q-Qwen2.5-14B-Instruct-1M-e3 | JungZoona/T3Q-Qwen2.5-14B-Instruct-1M-e3 | JungZoona | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.732396707403024}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Unknown", "params_billions": 0.0} |
HF Open LLM v2 | Supichi | Supichi/NJS26 | f336c7ee-2275-4045-a227-1a7abbaebf63 | 0.0.1 | hfopenllm_v2/Supichi_NJS26/1762652579.8961651 | 1762652579.8961651 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/NJS26 | Supichi/NJS26 | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.04481331755298164}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Supichi | Supichi/BBAI_275_Tsunami_gZ | 173028b9-03e3-44d7-a7e9-2c0c5c6f4b4e | 0.0.1 | hfopenllm_v2/Supichi_BBAI_275_Tsunami_gZ/1762652579.895135 | 1762652579.895135 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_275_Tsunami_gZ | Supichi/BBAI_275_Tsunami_gZ | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5369586031729146}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Supichi | Supichi/BBAI_250_Xia0_gZ | 068a06f4-3fdc-495f-b7e4-0effebe24e42 | 0.0.1 | hfopenllm_v2/Supichi_BBAI_250_Xia0_gZ/1762652579.894933 | 1762652579.894933 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_250_Xia0_gZ | Supichi/BBAI_250_Xia0_gZ | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4685401401614383}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Supichi | Supichi/BBA99 | fa793cb5-5522-4777-8d6f-e4719a51f767 | 0.0.1 | hfopenllm_v2/Supichi_BBA99/1762652579.8942661 | 1762652579.8942661 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBA99 | Supichi/BBA99 | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14066011516110588}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 17.161} |
HF Open LLM v2 | Supichi | Supichi/BBAIK29 | de5f2ab9-f1d2-49bc-9771-41b9da1bdfa3 | 0.0.1 | hfopenllm_v2/Supichi_BBAIK29/1762652579.89447 | 1762652579.894471 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAIK29 | Supichi/BBAIK29 | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45884807865352817}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Supichi | Supichi/BBAI_QWEEN_V000000_LUMEN_14B | 57fd3fdc-dfdd-44ee-8c30-dc5ce4a0df8d | 0.0.1 | hfopenllm_v2/Supichi_BBAI_QWEEN_V000000_LUMEN_14B/1762652579.895749 | 1762652579.8957498 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_QWEEN_V000000_LUMEN_14B | Supichi/BBAI_QWEEN_V000000_LUMEN_14B | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18145188100905596}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 10.366} |
HF Open LLM v2 | Supichi | Supichi/BBAI_525_Tsu_gZ_Xia0 | 6b6b273e-9cf0-405e-b1e4-5fdbd2ae16d9 | 0.0.1 | hfopenllm_v2/Supichi_BBAI_525_Tsu_gZ_Xia0/1762652579.8953412 | 1762652579.8953412 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_525_Tsu_gZ_Xia0 | Supichi/BBAI_525_Tsu_gZ_Xia0 | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5338612658856279}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Supichi | Supichi/HF_TOKEN | cd0ccaff-e1b3-4c11-a8a0-37137d0386e2 | 0.0.1 | hfopenllm_v2/Supichi_HF_TOKEN/1762652579.895958 | 1762652579.895958 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/HF_TOKEN | Supichi/HF_TOKEN | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1379872072766925}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 17.161} |
HF Open LLM v2 | Supichi | Supichi/BBAI_78B_Calme_3_1_Ties | a9c4a482-6b02-4cf6-a7d5-3e16334df634 | 0.0.1 | hfopenllm_v2/Supichi_BBAI_78B_Calme_3_1_Ties/1762652579.895541 | 1762652579.895541 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_78B_Calme_3_1_Ties | Supichi/BBAI_78B_Calme_3_1_Ties | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18280052482967415}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 27.06} |
HF Open LLM v2 | Supichi | Supichi/BBA-123 | a469604f-f755-46e0-8b1c-db4a365dec34 | 0.0.1 | hfopenllm_v2/Supichi_BBA-123/1762652579.894015 | 1762652579.894016 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBA-123 | Supichi/BBA-123 | Supichi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2079548930171944}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 17.161} |
HF Open LLM v2 | RLHFlow | RLHFlow/LLaMA3-iterative-DPO-final | 8ccda2e0-9801-41b0-8491-eb36615860f2 | 0.0.1 | hfopenllm_v2/RLHFlow_LLaMA3-iterative-DPO-final/1762652579.849687 | 1762652579.849688 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | RLHFlow/LLaMA3-iterative-DPO-final | RLHFlow/LLaMA3-iterative-DPO-final | RLHFlow | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.53401086893886}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH",... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sakalti | Sakalti/SakalFusion-7B-Alpha | 2329f6f2-228a-400b-9b2d-4ad6dd278b79 | 0.0.1 | hfopenllm_v2/Sakalti_SakalFusion-7B-Alpha/1762652579.866478 | 1762652579.8664792 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SakalFusion-7B-Alpha | Sakalti/SakalFusion-7B-Alpha | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5289653674472622}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-1.5B-Alpha-1.1 | e3f05df1-a653-41a0-983a-4a7d86b85c60 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-1.5B-Alpha-1.1/1762652579.859199 | 1762652579.859199 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-1.5B-Alpha-1.1 | Sakalti/SJT-1.5B-Alpha-1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3439429602344003}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-3.7B | e82f1a2e-f679-47b8-9fbb-a53116e2195b | 0.0.1 | hfopenllm_v2/Sakalti_SJT-3.7B/1762652579.860638 | 1762652579.8606389 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-3.7B | Sakalti/SJT-3.7B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.10776184966998675}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.783} |
HF Open LLM v2 | Sakalti | Sakalti/Magro-7B-v1.1 | 9e6c7958-689f-4437-b81a-c055d53ca33e | 0.0.1 | hfopenllm_v2/Sakalti_Magro-7B-v1.1/1762652579.857256 | 1762652579.857256 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Magro-7B-v1.1 | Sakalti/Magro-7B-v1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1204016454119514}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-2B | f720d81c-04e1-4f8a-b452-ae52cc7d9fb2 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-2B/1762652579.8602371 | 1762652579.860238 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-2B | Sakalti/SJT-2B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21507378200951255}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.614} |
HF Open LLM v2 | Sakalti | Sakalti/Qwen2.5-1B-Instruct | da01b31f-dde8-45dd-b793-c8258a09ddee | 0.0.1 | hfopenllm_v2/Sakalti_Qwen2.5-1B-Instruct/1762652579.858331 | 1762652579.858331 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Qwen2.5-1B-Instruct | Sakalti/Qwen2.5-1B-Instruct | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17513198313807365}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.988} |
HF Open LLM v2 | Sakalti | Sakalti/Saka-7.2B | 07f036d7-af59-49a8-8346-8a9a9dd21439 | 0.0.1 | hfopenllm_v2/Sakalti_Saka-7.2B/1762652579.86556 | 1762652579.865563 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saka-7.2B | Sakalti/Saka-7.2B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1544989516704566}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 7.292} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-2B-V1.1 | b4e467a7-3f2d-438a-8c42-1f7da1aafd20 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-2B-V1.1/1762652579.860439 | 1762652579.860439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-2B-V1.1 | Sakalti/SJT-2B-V1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3977235956151899}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.614} |
HF Open LLM v2 | Sakalti | Sakalti/SJTPass-2 | 7f508bd9-7f95-453d-9e96-747ce91a64b3 | 0.0.1 | hfopenllm_v2/Sakalti_SJTPass-2/1762652579.8624809 | 1762652579.8624818 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJTPass-2 | Sakalti/SJTPass-2 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24002867945939}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH",... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.63} |
HF Open LLM v2 | Sakalti | Sakalti/mergekit-01 | dd01becb-c2c0-4593-ac1e-db2ff11aa17b | 0.0.1 | hfopenllm_v2/Sakalti_mergekit-01/1762652579.868608 | 1762652579.868609 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/mergekit-01 | Sakalti/mergekit-01 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6233870574520051}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-72B-v1.5 | 258aae52-b934-4ba1-bdb0-e15bd8277234 | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-72B-v1.5/1762652579.8712351 | 1762652579.8712351 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-72B-v1.5 | Sakalti/ultiima-72B-v1.5 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6549610588793291}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | Sakalti | Sakalti/light-7b-beta | a66efce1-f6d2-4fad-964b-cc4e80012145 | 0.0.1 | hfopenllm_v2/Sakalti_light-7b-beta/1762652579.867865 | 1762652579.867866 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/light-7b-beta | Sakalti/light-7b-beta | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6233870574520051}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/Tara-3.8B-v1.1 | cd884e16-7e4d-4d17-8bad-5819604e0384 | 0.0.1 | hfopenllm_v2/Sakalti_Tara-3.8B-v1.1/1762652579.866961 | 1762652579.866962 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Tara-3.8B-v1.1 | Sakalti/Tara-3.8B-v1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.40621661635571393}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | Sakalti | Sakalti/SakaMoe-3x1.6B-Instruct | e806f2f4-0a10-49f6-a67e-dc1dd0a59ede | 0.0.1 | hfopenllm_v2/Sakalti_SakaMoe-3x1.6B-Instruct/1762652579.866188 | 1762652579.8661902 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SakaMoe-3x1.6B-Instruct | Sakalti/SakaMoe-3x1.6B-Instruct | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23708094522533543}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2MoeForCausalLM", "params_billions": 1.572} |
HF Open LLM v2 | Sakalti | Sakalti/Anemoi-3B | b50b5452-b824-4fd6-b0e4-cdaea09139a2 | 0.0.1 | hfopenllm_v2/Sakalti_Anemoi-3B/1762652579.856576 | 1762652579.856576 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Anemoi-3B | Sakalti/Anemoi-3B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3803629924156793}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.397} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-14B-v0.2 | f3f888bb-5e99-4521-83b2-4e182f492220 | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-14B-v0.2/1762652579.870035 | 1762652579.870036 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-14B-v0.2 | Sakalti/ultiima-14B-v0.2 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7069930007934502}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-0.5B | 7763650a-8a37-41f2-aadd-b1db7b41d0b3 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-0.5B/1762652579.858787 | 1762652579.858787 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-0.5B | Sakalti/SJT-0.5B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24247662867857286}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.63} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-7B-V1.1-Multilingal | 03cb237a-0519-449c-b9c7-d9fbb4d119cd | 0.0.1 | hfopenllm_v2/Sakalti_SJT-7B-V1.1-Multilingal/1762652579.861463 | 1762652579.861464 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-7B-V1.1-Multilingal | Sakalti/SJT-7B-V1.1-Multilingal | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19494053555676716}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-8B | cb136400-7d0e-4194-9a45-1646ff8cac95 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-8B/1762652579.861662 | 1762652579.8616629 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-8B | Sakalti/SJT-8B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6534871917623019}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 8.548} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-900M | ff057dd9-0102-485d-88d7-7e50145b5f7e | 0.0.1 | hfopenllm_v2/Sakalti_SJT-900M/1762652579.862072 | 1762652579.8620732 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-900M | Sakalti/SJT-900M | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2410027615615456}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.899} |
HF Open LLM v2 | Sakalti | Sakalti/SJTPass-4 | f814a3bd-b82e-4769-9ef7-a4670420bca0 | 0.0.1 | hfopenllm_v2/Sakalti_SJTPass-4/1762652579.8627222 | 1762652579.8627222 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJTPass-4 | Sakalti/SJTPass-4 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19129354557019818}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.167} |
HF Open LLM v2 | Sakalti | Sakalti/light-1.1-3B | 9da5b03b-0207-4e98-a5bf-5a658225e78f | 0.0.1 | hfopenllm_v2/Sakalti_light-1.1-3B/1762652579.867201 | 1762652579.867202 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/light-1.1-3B | Sakalti/light-1.1-3B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27345110972220377}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | Sakalti | Sakalti/model-3 | efd2a4d7-afcd-4653-ad4f-7d4f7206be95 | 0.0.1 | hfopenllm_v2/Sakalti_model-3/1762652579.869146 | 1762652579.869148 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/model-3 | Sakalti/model-3 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6263846593704703}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/SakalFusion-7B-Beta | 537a91f9-b1f3-49bf-bef7-a9ef8578c284 | 0.0.1 | hfopenllm_v2/Sakalti_SakalFusion-7B-Beta/1762652579.866734 | 1762652579.8667352 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SakalFusion-7B-Beta | Sakalti/SakalFusion-7B-Beta | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18090222830977362}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-14B | 1169b5fd-9418-4986-940a-276d163431c0 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-14B/1762652579.8596292 | 1762652579.85963 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-14B | Sakalti/SJT-14B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5494233079340594}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-7B-V1.1 | b1527426-9cc0-4eb5-af52-30e36e0e04fd | 0.0.1 | hfopenllm_v2/Sakalti_SJT-7B-V1.1/1762652579.861262 | 1762652579.861263 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-7B-V1.1 | Sakalti/SJT-7B-V1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4702888336281067}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-4B | 5115cea0-d3bf-486b-9609-36698e845653 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-4B/1762652579.8608499 | 1762652579.860851 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-4B | Sakalti/SJT-4B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4077403511571519}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | Sakalti | Sakalti/Saba1.5-1.5B | a76090d4-a0fb-45c8-b28c-fa225ec3d11c | 0.0.1 | hfopenllm_v2/Sakalti_Saba1.5-1.5B/1762652579.8637571 | 1762652579.863758 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saba1.5-1.5B | Sakalti/Saba1.5-1.5B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3332768166243345}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-14B-v0.4 | 688f9751-e261-41c6-a7a4-2dc33a702e09 | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-14B-v0.4/1762652579.8704672 | 1762652579.8704839 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-14B-v0.4 | Sakalti/ultiima-14B-v0.4 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3008284684636764}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | Sakalti | Sakalti/llama-3-yanyuedao-8b-instruct | cb550de6-4cd6-411e-9426-dc12421404ad | 0.0.1 | hfopenllm_v2/Sakalti_llama-3-yanyuedao-8b-instruct/1762652579.8681011 | 1762652579.8681011 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/llama-3-yanyuedao-8b-instruct | Sakalti/llama-3-yanyuedao-8b-instruct | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21857116894284942}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Sakalti | Sakalti/light-3B | a1593642-8d60-4680-90aa-8c3789d536d6 | 0.0.1 | hfopenllm_v2/Sakalti_light-3B/1762652579.8674219 | 1762652579.867423 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/light-3B | Sakalti/light-3B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5337360425892188}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.397} |
HF Open LLM v2 | Sakalti | Sakalti/SJTPass-5 | 5d5bda4e-8994-4cef-9772-d4bd435e9644 | 0.0.1 | hfopenllm_v2/Sakalti_SJTPass-5/1762652579.862921 | 1762652579.862922 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJTPass-5 | Sakalti/SJTPass-5 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24247662867857286}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.809} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-1.5B-Alpha | 21472871-fe74-447a-894c-80d77ae4ad0a | 0.0.1 | hfopenllm_v2/Sakalti_SJT-1.5B-Alpha/1762652579.858988 | 1762652579.858989 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-1.5B-Alpha | Sakalti/SJT-1.5B-Alpha | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3448671746521452}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | Sakalti | Sakalti/Saka-14B | 53556d59-3b32-44bc-9932-c52f05939b57 | 0.0.1 | hfopenllm_v2/Sakalti_Saka-14B/1762652579.8649821 | 1762652579.864983 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saka-14B | Sakalti/Saka-14B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7174341857382855}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-72B | cce8480a-353b-4f9b-8f6f-b2f1e9ae601a | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-72B/1762652579.8710139 | 1762652579.8710148 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-72B | Sakalti/ultiima-72B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7140121544169471}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | Sakalti | Sakalti/magro-7B | c2c87be8-4137-4bcc-8cbe-4589d193e94d | 0.0.1 | hfopenllm_v2/Sakalti_magro-7B/1762652579.868387 | 1762652579.8683882 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/magro-7B | Sakalti/magro-7B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13439008497453425}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Sakalti | Sakalti/Saba1-7B | 1200ed26-8450-4788-a1bf-20f2c9b9b2c0 | 0.0.1 | hfopenllm_v2/Sakalti_Saba1-7B/1762652579.863542 | 1762652579.863542 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saba1-7B | Sakalti/Saba1-7B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45847351693506566}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-1.7B | 6e2f01c1-ba87-4687-9db1-a0c0004bdfe1 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-1.7B/1762652579.859416 | 1762652579.8594172 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-1.7B | Sakalti/SJT-1.7B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17762980004166723}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.684} |
HF Open LLM v2 | Sakalti | Sakalti/Oxyge1-33B | ee17e3a4-2036-4e57-9ada-51fe6d23ffac | 0.0.1 | hfopenllm_v2/Sakalti_Oxyge1-33B/1762652579.8578959 | 1762652579.857897 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Oxyge1-33B | Sakalti/Oxyge1-33B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4548265269484966}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 32.764} |
HF Open LLM v2 | Sakalti | Sakalti/Saba1.5-Pro-3B | 14e1dd44-92f1-4d97-be67-fa98c9802ff1 | 0.0.1 | hfopenllm_v2/Sakalti_Saba1.5-Pro-3B/1762652579.863965 | 1762652579.863966 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saba1.5-Pro-3B | Sakalti/Saba1.5-Pro-3B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23860468002677343}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 2.9} |
HF Open LLM v2 | Sakalti | Sakalti/mergekit-della_linear-vmeykci | a4bd1768-2382-47fe-a8bd-6e42bda06d2f | 0.0.1 | hfopenllm_v2/Sakalti_mergekit-della_linear-vmeykci/1762652579.868854 | 1762652579.868856 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/mergekit-della_linear-vmeykci | Sakalti/mergekit-della_linear-vmeykci | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1126078804239418}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | Sakalti | Sakalti/Saba1-1.8B | d8cc8e9e-b672-4b26-a454-f97cd7a08648 | 0.0.1 | hfopenllm_v2/Sakalti_Saba1-1.8B/1762652579.863334 | 1762652579.863334 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saba1-1.8B | Sakalti/Saba1-1.8B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3332768166243345}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-2.4B | 30b98827-5afb-4bfe-b765-9c81cb4580f4 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-2.4B/1762652579.859841 | 1762652579.859841 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-2.4B | Sakalti/SJT-2.4B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28042039566128985}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 2.432} |
HF Open LLM v2 | Sakalti | Sakalti/Euphrates-14B | db8c1ba2-4029-45c5-b8a6-5343356266eb | 0.0.1 | hfopenllm_v2/Sakalti_Euphrates-14B/1762652579.856813 | 1762652579.8568141 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Euphrates-14B | Sakalti/Euphrates-14B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.26468326263203856}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | Sakalti | Sakalti/Sailor-japanese | 8449b01f-c489-4008-97d4-aa3f0394cda4 | 0.0.1 | hfopenllm_v2/Sakalti_Sailor-japanese/1762652579.864587 | 1762652579.864588 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Sailor-japanese | Sakalti/Sailor-japanese | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16046866757979938}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-24B-Alpha | f86649f8-8962-4496-8cd8-fed702a7e63b | 0.0.1 | hfopenllm_v2/Sakalti_SJT-24B-Alpha/1762652579.860041 | 1762652579.860041 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-24B-Alpha | Sakalti/SJT-24B-Alpha | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3206370208823699}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 24.125} |
HF Open LLM v2 | Sakalti | Sakalti/Saka-7.6B | 10923a84-a611-4830-b84c-0e91c0628541 | 0.0.1 | hfopenllm_v2/Sakalti_Saka-7.6B/1762652579.865891 | 1762652579.8658922 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saka-7.6B | Sakalti/Saka-7.6B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45242844541372446}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-32B | 18f686ca-453d-4a0c-9f1a-e2f4ba53399c | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-32B/1762652579.870782 | 1762652579.870784 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-32B | Sakalti/ultiima-32B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6854357549080883}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 32.764} |
HF Open LLM v2 | Sakalti | Sakalti/SJT-8B-V1.1 | 0cf37c9e-9218-4366-8065-befea0d2b749 | 0.0.1 | hfopenllm_v2/Sakalti_SJT-8B-V1.1/1762652579.8618612 | 1762652579.861862 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/SJT-8B-V1.1 | Sakalti/SJT-8B-V1.1 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4620706392372239}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 8.545} |
HF Open LLM v2 | Sakalti | Sakalti/ultiima-14B-v0.3 | 5cd3794f-990f-4965-9fbc-7faf3216e808 | 0.0.1 | hfopenllm_v2/Sakalti_ultiima-14B-v0.3/1762652579.870242 | 1762652579.870243 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/ultiima-14B-v0.3 | Sakalti/ultiima-14B-v0.3 | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7040452665593957}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | Sakalti | Sakalti/Neptuno-3B | 4c2150fc-f473-4bdc-8823-960778ccbc75 | 0.0.1 | hfopenllm_v2/Sakalti_Neptuno-3B/1762652579.857454 | 1762652579.857455 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Neptuno-3B | Sakalti/Neptuno-3B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42962229107656574}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.397} |
HF Open LLM v2 | Sakalti | Sakalti/Saka-24B | a5e13aa9-bf5f-4201-bc93-504521141f43 | 0.0.1 | hfopenllm_v2/Sakalti_Saka-24B/1762652579.865175 | 1762652579.865176 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sakalti/Saka-24B | Sakalti/Saka-24B | Sakalti | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38186123928952953}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 23.572} |