| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | ai4bharat | ai4bharat/Airavata | 350b0559-6331-4b8b-82e2-0463baea9d8a | 0.0.1 | hfopenllm_v2/ai4bharat_Airavata/1762652579.978861 | 1762652579.978862 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ai4bharat/Airavata | ai4bharat/Airavata | ai4bharat | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.05585402288150995}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.87} |
HF Open LLM v2 | nlpguy | nlpguy/Miisce-one | e557a750-53b2-4181-a19c-dfdeee11ee61 | 0.0.1 | hfopenllm_v2/nlpguy_Miisce-one/1762652580.4081762 | 1762652580.408177 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Miisce-one | nlpguy/Miisce-one | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6065761069517768}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | nlpguy | nlpguy/StarFusion-alpha1 | 1d5c35ef-ec57-42a3-8459-6db62627c6d2 | 0.0.1 | hfopenllm_v2/nlpguy_StarFusion-alpha1/1762652580.409272 | 1762652580.409272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/StarFusion-alpha1 | nlpguy/StarFusion-alpha1 | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5660092997690572}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | nlpguy | nlpguy/Lion-Lamarck-v.1.0.8 | 67582e10-cebf-4938-bfca-2eb6883e2c39 | 0.0.1 | hfopenllm_v2/nlpguy_Lion-Lamarck-v.1.0.8/1762652580.40752 | 1762652580.407521 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Lion-Lamarck-v.1.0.8 | nlpguy/Lion-Lamarck-v.1.0.8 | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45090471061228654}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | nlpguy | nlpguy/Lion-Lamarck-v.1.1.0 | 027ad81a-1271-4c25-9966-02370f6ee49d | 0.0.1 | hfopenllm_v2/nlpguy_Lion-Lamarck-v.1.1.0/1762652580.4079711 | 1762652580.4079711 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Lion-Lamarck-v.1.1.0 | nlpguy/Lion-Lamarck-v.1.1.0 | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3657750324694034}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | nlpguy | nlpguy/Lion-Lamarck-v.1.0.9 | f5fa6816-051d-4d86-bef5-ba9731b8bd9a | 0.0.1 | hfopenllm_v2/nlpguy_Lion-Lamarck-v.1.0.9/1762652580.407768 | 1762652580.4077692 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/Lion-Lamarck-v.1.0.9 | nlpguy/Lion-Lamarck-v.1.0.9 | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34089549063152436}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | nlpguy | nlpguy/StableProse | bedab076-13e7-468a-b8e8-dddb57d78583 | 0.0.1 | hfopenllm_v2/nlpguy_StableProse/1762652580.40907 | 1762652580.40907 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nlpguy/StableProse | nlpguy/StableProse | nlpguy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19723888172271792}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | utkmst | utkmst/chimera-beta-test2-lora-merged | 00620da3-d3ee-442a-a319-248906d959c0 | 0.0.1 | hfopenllm_v2/utkmst_chimera-beta-test2-lora-merged/1762652580.581129 | 1762652580.581131 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | utkmst/chimera-beta-test2-lora-merged | utkmst/chimera-beta-test2-lora-merged | utkmst | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6054269338688014}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | mistralai | mistralai/Mistral-Small-Instruct-2409 | a85d1dbd-465b-42c8-baf5-0e7a7ca00725 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Small-Instruct-2409/1762652580.364117 | 1762652580.364118 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Small-Instruct-2409 | mistralai/Mistral-Small-Instruct-2409 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6282829558903709}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 22.247} |
HF Open LLM v2 | mistralai | mistralai/Mistral-Small-Instruct-2409 | 15f66094-73f1-4302-adad-69522872682d | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Small-Instruct-2409/1762652580.363916 | 1762652580.363917 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Small-Instruct-2409 | mistralai/Mistral-Small-Instruct-2409 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.666975846310013}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 22.05} |
HF Open LLM v2 | mistralai | mistralai/Mistral-7B-Instruct-v0.2 | fb55e940-f03d-4d79-9363-ec17eebf9596 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-7B-Instruct-v0.2/1762652580.362234 | 1762652580.3622348 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-7B-Instruct-v0.2 | mistralai/Mistral-7B-Instruct-v0.2 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5496227786717023}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistralai | mistralai/Mistral-7B-Instruct-v0.1 | ef779e6f-1c12-4237-aa45-e6315ed01d92 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-7B-Instruct-v0.1/1762652580.3620229 | 1762652580.3620229 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-7B-Instruct-v0.1 | mistralai/Mistral-7B-Instruct-v0.1 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4487060998151571}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | mistralai | mistralai/Ministral-8B-Instruct-2410 | d0cfd22e-6bad-4784-a172-76892d44f70b | 0.0.1 | hfopenllm_v2/mistralai_Ministral-8B-Instruct-2410/1762652580.361781 | 1762652580.361782 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Ministral-8B-Instruct-2410 | mistralai/Ministral-8B-Instruct-2410 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5896399331551394}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.02} |
HF Open LLM v2 | mistralai | mistralai/Mistral-7B-Instruct-v0.3 | ddc775e5-a4cc-49bd-ace3-113f325134c0 | 0.0.1 | hfopenllm_v2/mistralai_Mistral-7B-Instruct-v0.3/1762652580.362444 | 1762652580.362445 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-7B-Instruct-v0.3 | mistralai/Mistral-7B-Instruct-v0.3 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5465254413844156}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.248} |
HF Open LLM v2 | mistralai | mistralai/Mixtral-8x7B-Instruct-v0.1 | 2e1de889-2df9-4c81-b5ce-c00c602704b7 | 0.0.1 | hfopenllm_v2/mistralai_Mixtral-8x7B-Instruct-v0.1/1762652580.364703 | 1762652580.364704 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mixtral-8x7B-Instruct-v0.1 | mistralai/Mixtral-8x7B-Instruct-v0.1 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5599143605633053}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | mistralai | mistralai/Mistral-Large-Instruct-2411 | 1f2c9c0c-7e71-4886-9980-300a7ae5c55e | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Large-Instruct-2411/1762652580.3630579 | 1762652580.363059 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Large-Instruct-2411 | mistralai/Mistral-Large-Instruct-2411 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8400577135334246}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 122.61} |
HF Open LLM v2 | mistralai | mistralai/Mistral-Nemo-Instruct-2407 | 3758a033-b197-403b-ab9e-7457856f3ebc | 0.0.1 | hfopenllm_v2/mistralai_Mistral-Nemo-Instruct-2407/1762652580.363499 | 1762652580.363499 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mistral-Nemo-Instruct-2407 | mistralai/Mistral-Nemo-Instruct-2407 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6380248850826917}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | mistralai | mistralai/Mixtral-8x22B-Instruct-v0.1 | ee88881e-cdeb-4a55-b784-6b41b983d7aa | 0.0.1 | hfopenllm_v2/mistralai_Mixtral-8x22B-Instruct-v0.1/1762652580.3642921 | 1762652580.3642921 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mistralai/Mixtral-8x22B-Instruct-v0.1 | mistralai/Mixtral-8x22B-Instruct-v0.1 | mistralai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7183584001560305}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 140.621} |
HF Open LLM v2 | divyanshukunwar | divyanshukunwar/SASTRI_1_9B | f0ccf0c5-269f-46e1-a13e-b54f2903779b | 0.0.1 | hfopenllm_v2/divyanshukunwar_SASTRI_1_9B/1762652580.1269271 | 1762652580.1269279 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | divyanshukunwar/SASTRI_1_9B | divyanshukunwar/SASTRI_1_9B | divyanshukunwar | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4207292206899914}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 5.211} |
HF Open LLM v2 | 0-hero | 0-hero/Matter-0.2-7B-DPO | 40e80d5e-db72-46b7-bd14-b7d005df4be8 | 0.0.1 | hfopenllm_v2/0-hero_Matter-0.2-7B-DPO/1762652579.4626381 | 1762652579.462642 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 0-hero/Matter-0.2-7B-DPO | 0-hero/Matter-0.2-7B-DPO | 0-hero | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3302792147058693}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | KingNish | KingNish/Reasoning-0.5b | 98f5e59e-0bdb-405b-a18e-3addd8920951 | 0.0.1 | hfopenllm_v2/KingNish_Reasoning-0.5b/1762652579.6997252 | 1762652579.699726 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | KingNish/Reasoning-0.5b | KingNish/Reasoning-0.5b | KingNish | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.217421995859874}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | OpenLLM-France | OpenLLM-France/Lucie-7B-Instruct-v1.1 | e94a0550-93fa-448a-a4a4-187fd1b7d24e | 0.0.1 | hfopenllm_v2/OpenLLM-France_Lucie-7B-Instruct-v1.1/1762652579.807442 | 1762652579.807442 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OpenLLM-France/Lucie-7B-Instruct-v1.1 | OpenLLM-France/Lucie-7B-Instruct-v1.1 | OpenLLM-France | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3038759380665523}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.707} |
HF Open LLM v2 | OpenLLM-France | OpenLLM-France/Lucie-7B-Instruct | af17be77-0ae3-4b90-ba85-a4886450cd43 | 0.0.1 | hfopenllm_v2/OpenLLM-France_Lucie-7B-Instruct/1762652579.806944 | 1762652579.806945 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OpenLLM-France/Lucie-7B-Instruct | OpenLLM-France/Lucie-7B-Instruct | OpenLLM-France | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.279645784296777}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.707} |
HF Open LLM v2 | OpenLLM-France | OpenLLM-France/Lucie-7B | 01e4cd19-4f1f-4c30-b80f-e1d287d5d7c2 | 0.0.1 | hfopenllm_v2/OpenLLM-France_Lucie-7B/1762652579.806693 | 1762652579.8066938 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OpenLLM-France/Lucie-7B | OpenLLM-France/Lucie-7B | OpenLLM-France | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24964538535530173}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.707} |
HF Open LLM v2 | OpenLLM-France | OpenLLM-France/Lucie-7B-Instruct-human-data | 26787f2b-8f30-4cc8-b39e-447b8c53aa85 | 0.0.1 | hfopenllm_v2/OpenLLM-France_Lucie-7B-Instruct-human-data/1762652579.8072178 | 1762652579.807219 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OpenLLM-France/Lucie-7B-Instruct-human-data | OpenLLM-France/Lucie-7B-Instruct-human-data | OpenLLM-France | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.29460830596151544}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.707} |
HF Open LLM v2 | skymizer | skymizer/Llama2-7b-sft-chat-custom-template-dpo | 24473e8a-2631-44b5-9cc2-81f0669d8032 | 0.0.1 | hfopenllm_v2/skymizer_Llama2-7b-sft-chat-custom-template-dpo/1762652580.517826 | 1762652580.517826 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | skymizer/Llama2-7b-sft-chat-custom-template-dpo | skymizer/Llama2-7b-sft-chat-custom-template-dpo | skymizer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2352823840742563}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | monsterapi | monsterapi/Llama-3_1-8B-Instruct-orca-ORPO | b70a3980-7b0b-4bb1-878f-c2d49f9df09e | 0.0.1 | hfopenllm_v2/monsterapi_Llama-3_1-8B-Instruct-orca-ORPO/1762652580.3723478 | 1762652580.3723478 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | monsterapi/Llama-3_1-8B-Instruct-orca-ORPO | monsterapi/Llama-3_1-8B-Instruct-orca-ORPO | monsterapi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22728914834860392}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 16.061} |
HF Open LLM v2 | failspy | failspy/Llama-3-8B-Instruct-abliterated | 8aa6c90e-a6ee-4dfe-8bf4-b5d256be9cd6 | 0.0.1 | hfopenllm_v2/failspy_Llama-3-8B-Instruct-abliterated/1762652580.1499012 | 1762652580.149902 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/Llama-3-8B-Instruct-abliterated | failspy/Llama-3-8B-Instruct-abliterated | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5908888416069362}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | failspy | failspy/Meta-Llama-3-8B-Instruct-abliterated-v3 | c598dbff-4ab5-4405-b75d-13571ae3d862 | 0.0.1 | hfopenllm_v2/failspy_Meta-Llama-3-8B-Instruct-abliterated-v3/1762652580.150389 | 1762652580.15039 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/Meta-Llama-3-8B-Instruct-abliterated-v3 | failspy/Meta-Llama-3-8B-Instruct-abliterated-v3 | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7244533393617822}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | failspy | failspy/Llama-3-8B-Instruct-MopeyMule | f5bfa461-15bf-4e32-8471-74f456c62fd9 | 0.0.1 | hfopenllm_v2/failspy_Llama-3-8B-Instruct-MopeyMule/1762652580.1496441 | 1762652580.1496441 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/Llama-3-8B-Instruct-MopeyMule | failspy/Llama-3-8B-Instruct-MopeyMule | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6750444376476638}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | failspy | failspy/Phi-3-medium-4k-instruct-abliterated-v3 | 264bc4a6-f0ad-4eef-a519-6d97f8f6ab91 | 0.0.1 | hfopenllm_v2/failspy_Phi-3-medium-4k-instruct-abliterated-v3/1762652580.1505978 | 1762652580.150599 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/Phi-3-medium-4k-instruct-abliterated-v3 | failspy/Phi-3-medium-4k-instruct-abliterated-v3 | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6319299458769398}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 13.96} |
HF Open LLM v2 | failspy | failspy/Meta-Llama-3-70B-Instruct-abliterated-v3.5 | e0329607-d832-4252-ad71-81e8a8c4bb31 | 0.0.1 | hfopenllm_v2/failspy_Meta-Llama-3-70B-Instruct-abliterated-v3.5/1762652580.1501682 | 1762652580.1501691 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/Meta-Llama-3-70B-Instruct-abliterated-v3.5 | failspy/Meta-Llama-3-70B-Instruct-abliterated-v3.5 | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7746867201248244}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | failspy | failspy/llama-3-70B-Instruct-abliterated | f31f7ad3-9018-4891-be05-12787728904c | 0.0.1 | hfopenllm_v2/failspy_llama-3-70B-Instruct-abliterated/1762652580.1508029 | 1762652580.150804 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | failspy/llama-3-70B-Instruct-abliterated | failspy/llama-3-70B-Instruct-abliterated | failspy | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8023389052159382}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | OpenAssistant | OpenAssistant/oasst-sft-1-pythia-12b | ba1129fd-f158-47ad-b194-7cff794b9ef2 | 0.0.1 | hfopenllm_v2/OpenAssistant_oasst-sft-1-pythia-12b/1762652579.799746 | 1762652579.799747 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OpenAssistant/oasst-sft-1-pythia-12b | OpenAssistant/oasst-sft-1-pythia-12b | OpenAssistant | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.10553885911603435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 12.0} |
HF Open LLM v2 | Weyaxi | Weyaxi/Bagel-Hermes-2x34B | 5b614673-6566-4b82-bf7c-13268ebb1577 | 0.0.1 | hfopenllm_v2/Weyaxi_Bagel-Hermes-2x34B/1762652579.948213 | 1762652579.948214 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Bagel-Hermes-2x34B | Weyaxi/Bagel-Hermes-2x34B | Weyaxi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5431532777474878}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 60.814} |
HF Open LLM v2 | Weyaxi | Weyaxi/Bagel-Hermes-34B-Slerp | 28439ab5-0e5f-4dae-a98a-e0c1b743a8b0 | 0.0.1 | hfopenllm_v2/Weyaxi_Bagel-Hermes-34B-Slerp/1762652579.948482 | 1762652579.948482 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Bagel-Hermes-34B-Slerp | Weyaxi/Bagel-Hermes-34B-Slerp | Weyaxi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4602720780861448}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | Weyaxi | Weyaxi/SauerkrautLM-UNA-SOLAR-Instruct | 8ddec5bb-ab90-4c98-8482-a412e7735246 | 0.0.1 | hfopenllm_v2/Weyaxi_SauerkrautLM-UNA-SOLAR-Instruct/1762652579.950165 | 1762652579.950166 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/SauerkrautLM-UNA-SOLAR-Instruct | Weyaxi/SauerkrautLM-UNA-SOLAR-Instruct | Weyaxi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4573243438520902}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | Weyaxi | Weyaxi/Einstein-v4-7B | 035c5e35-0ebe-4e91-a598-8d01688462a3 | 0.0.1 | hfopenllm_v2/Weyaxi_Einstein-v4-7B/1762652579.948704 | 1762652579.948705 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Weyaxi/Einstein-v4-7B | Weyaxi/Einstein-v4-7B | Weyaxi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.47081299839980145}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Nekochu | Nekochu/Llama-3.1-8B-french-DPO | ebc2a3b7-30e9-4608-a8c0-ea90a308c0e5 | 0.0.1 | hfopenllm_v2/Nekochu_Llama-3.1-8B-french-DPO/1762652579.770777 | 1762652579.7707782 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Nekochu/Llama-3.1-8B-french-DPO | Nekochu/Llama-3.1-8B-french-DPO | Nekochu | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46564227361179444}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Nekochu | Nekochu/Luminia-13B-v3 | 172f121a-3843-4b01-94e1-a95001909bb8 | 0.0.1 | hfopenllm_v2/Nekochu_Luminia-13B-v3/1762652579.771023 | 1762652579.771023 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Nekochu/Luminia-13B-v3 | Nekochu/Luminia-13B-v3 | Nekochu | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25231829323971505}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 13.016} |
HF Open LLM v2 | Nekochu | Nekochu/Luminia-8B-RP | fd23ba4a-a0ce-474b-9aa4-b5295d872028 | 0.0.1 | hfopenllm_v2/Nekochu_Luminia-8B-RP/1762652579.7713962 | 1762652579.7713978 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Nekochu/Luminia-8B-RP | Nekochu/Luminia-8B-RP | Nekochu | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5574165436597118}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | shivam9980 | shivam9980/NEPALI-LLM | 234f5f98-a5fc-417a-8463-186bf600993a | 0.0.1 | hfopenllm_v2/shivam9980_NEPALI-LLM/1762652580.51522 | 1762652580.5152209 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | shivam9980/NEPALI-LLM | shivam9980/NEPALI-LLM | shivam9980 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.041666112581284324}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on B... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.273} |
HF Open LLM v2 | mattshumer | mattshumer/ref_70_e3 | 8ab597da-85ec-45d5-b5e2-f51ca8a2f3c9 | 0.0.1 | hfopenllm_v2/mattshumer_ref_70_e3/1762652580.342239 | 1762652580.34224 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mattshumer/ref_70_e3 | mattshumer/ref_70_e3 | mattshumer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6294321289733462}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | THUDM | THUDM/glm-4-9b-chat-1m-hf | 077f7956-8c9b-47ef-8c4d-40455bbb0027 | 0.0.1 | hfopenllm_v2/THUDM_glm-4-9b-chat-1m-hf/1762652579.9096901 | 1762652579.9096909 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | THUDM/glm-4-9b-chat-1m-hf | THUDM/glm-4-9b-chat-1m-hf | THUDM | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5341106043076814}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GlmForCausalLM", "params_billions": 9.484} |
HF Open LLM v2 | THUDM | THUDM/glm-4-9b | bd038a6c-1241-401d-962d-e033434ba735 | 0.0.1 | hfopenllm_v2/THUDM_glm-4-9b/1762652579.9090161 | 1762652579.9090161 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | THUDM/glm-4-9b | THUDM/glm-4-9b | THUDM | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1426082793654171}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "ChatGLMModelM", "params_billions": 9.0} |
HF Open LLM v2 | THUDM | THUDM/glm-4-9b-chat-hf | 0af9353e-10d5-42e3-8bc9-4c736720ff30 | 0.0.1 | hfopenllm_v2/THUDM_glm-4-9b-chat-hf/1762652579.909895 | 1762652579.909896 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | THUDM/glm-4-9b-chat-hf | THUDM/glm-4-9b-chat-hf | THUDM | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6513140688927601}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GlmForCausalLM", "params_billions": 9.4} |
HF Open LLM v2 | THUDM | THUDM/glm-4-9b-chat | e7c5d8ef-d480-4ab9-b698-409e5ea76cf8 | 0.0.1 | hfopenllm_v2/THUDM_glm-4-9b-chat/1762652579.909267 | 1762652579.909267 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | THUDM/glm-4-9b-chat | THUDM/glm-4-9b-chat | THUDM | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH", "lower_is_be... | {"precision": "bfloat16", "architecture": "ChatGLMModelM", "params_billions": 9.0} |
HF Open LLM v2 | THUDM | THUDM/glm-4-9b-chat-1m | f0c306f0-683e-4582-81b7-f0a2c372060f | 0.0.1 | hfopenllm_v2/THUDM_glm-4-9b-chat-1m/1762652579.909478 | 1762652579.909479 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | THUDM/glm-4-9b-chat-1m | THUDM/glm-4-9b-chat-1m | THUDM | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH", "lower_is_be... | {"precision": "bfloat16", "architecture": "ChatGLMModel", "params_billions": 9.484} |
HF Open LLM v2 | Parissa3 | Parissa3/test-model | 53cb44c7-f7bc-40fa-88e7-511b9dfab004 | 0.0.1 | hfopenllm_v2/Parissa3_test-model/1762652579.811859 | 1762652579.81186 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Parissa3/test-model | Parissa3/test-model | Parissa3 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3882564927725103}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | inflatebot | inflatebot/MN-12B-Mag-Mell-R1 | 43f7613d-bd9f-480d-a2ed-dcabf3169944 | 0.0.1 | hfopenllm_v2/inflatebot_MN-12B-Mag-Mell-R1/1762652580.2261078 | 1762652580.226109 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | inflatebot/MN-12B-Mag-Mell-R1 | inflatebot/MN-12B-Mag-Mell-R1 | inflatebot | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46129602787271107}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | meetkai | meetkai/functionary-small-v3.1 | 7312a4c6-85e2-4cb3-9c3e-1dfc039d1c3a | 0.0.1 | hfopenllm_v2/meetkai_functionary-small-v3.1/1762652580.345532 | 1762652580.345533 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meetkai/functionary-small-v3.1 | meetkai/functionary-small-v3.1 | meetkai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6274584768414474}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | natong19 | natong19/Qwen2-7B-Instruct-abliterated | 7c8605a5-2f0d-4cc7-b840-d77cb5fdf849 | 0.0.1 | hfopenllm_v2/natong19_Qwen2-7B-Instruct-abliterated/1762652580.375325 | 1762652580.375325 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | natong19/Qwen2-7B-Instruct-abliterated | natong19/Qwen2-7B-Instruct-abliterated | natong19 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5836945970026197}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | natong19 | natong19/Mistral-Nemo-Instruct-2407-abliterated | 5256f7b6-f830-4733-a092-01470607558d | 0.0.1 | hfopenllm_v2/natong19_Mistral-Nemo-Instruct-2407-abliterated/1762652580.375077 | 1762652580.375078 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | natong19/Mistral-Nemo-Instruct-2407-abliterated | natong19/Mistral-Nemo-Instruct-2407-abliterated | natong19 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6392239258500778}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | shadowml | shadowml/BeagSake-7B | 2a71923c-8697-4b62-94fa-4c16874df7a7 | 0.0.1 | hfopenllm_v2/shadowml_BeagSake-7B/1762652580.514317 | 1762652580.514318 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | shadowml/BeagSake-7B | shadowml/BeagSake-7B | shadowml | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5215960318621258}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | shadowml | shadowml/Mixolar-4x7b | 65a2c055-9bb5-458d-8a65-89b363b47a3a | 0.0.1 | hfopenllm_v2/shadowml_Mixolar-4x7b/1762652580.5145578 | 1762652580.514559 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | shadowml/Mixolar-4x7b | shadowml/Mixolar-4x7b | shadowml | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3893303102434873}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 36.099} |
HF Open LLM v2 | Ateron | Ateron/Way_of_MagPicaro | 0a5e585d-1a90-4849-9df5-670a56b9f161 | 0.0.1 | hfopenllm_v2/Ateron_Way_of_MagPicaro/1762652579.484595 | 1762652579.484596 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ateron/Way_of_MagPicaro | Ateron/Way_of_MagPicaro | Ateron | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2637091805298829}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Ateron | Ateron/Lotus-Magpic | bedab846-a6b2-4c51-9690-27deb7a76fe7 | 0.0.1 | hfopenllm_v2/Ateron_Lotus-Magpic/1762652579.484373 | 1762652579.484374 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ateron/Lotus-Magpic | Ateron/Lotus-Magpic | Ateron | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6286076499244228}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Ateron | Ateron/Glowing-Forest-12B | 13716fd0-049a-4e9a-90ca-af9db59c1703 | 0.0.1 | hfopenllm_v2/Ateron_Glowing-Forest-12B/1762652579.484101 | 1762652579.4841018 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ateron/Glowing-Forest-12B | Ateron/Glowing-Forest-12B | Ateron | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3591803082487799}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Interleaved | c2e26b8a-3a12-4cb8-888e-96affc8cbac9 | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_8.11B_36Layers-Interleaved/1762652579.8163 | 1762652579.8163 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Interleaved | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Interleaved | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5960595663949432}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.114} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended | 349bccfd-1816-4845-a1b9-2d9f4936adea | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_8.11B_36Layers-Appended/1762652579.8160908 | 1762652579.8160908 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended | Pretergeek/OpenChat-3.5-0106_8.11B_36Layers-Appended | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5975833011963811}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.114} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended | eca9180f-20d5-4bcd-9a74-e2f69c4ea4ad | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_10.7B_48Layers-Appended/1762652579.815407 | 1762652579.815407 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Appended | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5960595663949432}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_32K-PoSE | 195acbac-1db7-47ed-907f-98e312fc8921 | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_32K-PoSE/1762652579.815889 | 1762652579.8158898 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_32K-PoSE | Pretergeek/OpenChat-3.5-0106_32K-PoSE | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3968991165662664}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended | a70222dc-0589-4f09-ac8c-3ff4fa72328f | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_8.99B_40Layers-Appended/1762652579.81651 | 1762652579.816511 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Appended | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5960595663949432}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.987} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Interleaved | 65d32305-4f23-4041-a107-8625822c1322 | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_10.7B_48Layers-Interleaved/1762652579.81567 | 1762652579.815671 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Interleaved | Pretergeek/OpenChat-3.5-0106_10.7B_48Layers-Interleaved | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5960595663949432}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Interleaved | 19eb8f3a-ca9d-4da4-8e7e-96eebfd33576 | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_8.99B_40Layers-Interleaved/1762652579.816719 | 1762652579.816719 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Interleaved | Pretergeek/OpenChat-3.5-0106_8.99B_40Layers-Interleaved | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5975833011963811}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.987} |
HF Open LLM v2 | Pretergeek | Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended | e44eddb9-9764-4bc9-be85-ec7995846da0 | 0.0.1 | hfopenllm_v2/Pretergeek_OpenChat-3.5-0106_9.86B_44Layers-Appended/1762652579.816936 | 1762652579.816937 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended | Pretergeek/OpenChat-3.5-0106_9.86B_44Layers-Appended | Pretergeek | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5960595663949432}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 9.859} |
HF Open LLM v2 | hongbai12 | hongbai12/li-0.4-pre | ab7dcb4c-3884-428f-b342-38034dd51b56 | 0.0.1 | hfopenllm_v2/hongbai12_li-0.4-pre/1762652580.191224 | 1762652580.191225 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | hongbai12/li-0.4-pre | hongbai12/li-0.4-pre | hongbai12 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5199725616918665}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | 3rd-Degree-Burn | 3rd-Degree-Burn/L-3.1-Science-Writer-8B | 0c4fd071-b5c9-4bf1-a1d5-d658be1a3258 | 0.0.1 | hfopenllm_v2/3rd-Degree-Burn_L-3.1-Science-Writer-8B/1762652579.470164 | 1762652579.470165 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 3rd-Degree-Burn/L-3.1-Science-Writer-8B | 3rd-Degree-Burn/L-3.1-Science-Writer-8B | 3rd-Degree-Burn | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42625012743963797}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | argilla | argilla/notus-7b-v1 | c06f66ea-d9e3-4902-b3fd-188110f9c1e4 | 0.0.1 | hfopenllm_v2/argilla_notus-7b-v1/1762652580.017684 | 1762652580.017685 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | argilla/notus-7b-v1 | argilla/notus-7b-v1 | argilla | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.508207112683236}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | argilla | argilla/notux-8x7b-v1 | 60185907-11c2-454c-bfbc-3c5741651ab7 | 0.0.1 | hfopenllm_v2/argilla_notux-8x7b-v1/1762652580.017979 | 1762652580.0179799 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | argilla/notux-8x7b-v1 | argilla/notux-8x7b-v1 | argilla | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5422290633297429}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | SeaLLMs | SeaLLMs/SeaLLM-7B-v2.5 | 7117b360-ef16-4da9-9226-b66b6aac9703 | 0.0.1 | hfopenllm_v2/SeaLLMs_SeaLLM-7B-v2.5/1762652579.878138 | 1762652579.8781388 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SeaLLMs/SeaLLM-7B-v2.5 | SeaLLMs/SeaLLM-7B-v2.5 | SeaLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4521536190640833}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
HF Open LLM v2 | SeaLLMs | SeaLLMs/SeaLLMs-v3-7B-Chat | f119b2b5-2303-4772-9ae0-ce8f573f86c3 | 0.0.1 | hfopenllm_v2/SeaLLMs_SeaLLMs-v3-7B-Chat/1762652579.8783438 | 1762652579.878345 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SeaLLMs/SeaLLMs-v3-7B-Chat | SeaLLMs/SeaLLMs-v3-7B-Chat | SeaLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43766539448662883}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | SeaLLMs | SeaLLMs/SeaLLM-7B-v2 | 8f41a438-e9b7-43c6-b0b2-447a71ac360f | 0.0.1 | hfopenllm_v2/SeaLLMs_SeaLLM-7B-v2/1762652579.877889 | 1762652579.877889 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | SeaLLMs/SeaLLM-7B-v2 | SeaLLMs/SeaLLM-7B-v2 | SeaLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.36712367629002157}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.376} |
HF Open LLM v2 | DavieLion | DavieLion/Lllma-3.2-1B | 274ed35b-4abe-4f20-bd18-7e386a7fdaa5 | 0.0.1 | hfopenllm_v2/DavieLion_Lllma-3.2-1B/1762652579.5458188 | 1762652579.54582 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DavieLion/Lllma-3.2-1B | DavieLion/Lllma-3.2-1B | DavieLion | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1601439735457475}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | senseable | senseable/WestLake-7B-v2 | 6ef15d50-74b7-4e09-856c-05343841e24b | 0.0.1 | hfopenllm_v2/senseable_WestLake-7B-v2/1762652580.511263 | 1762652580.511264 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | senseable/WestLake-7B-v2 | senseable/WestLake-7B-v2 | senseable | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4418620371724801}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | kyutai | kyutai/helium-1-preview-2b | ce4ddb86-646e-4c59-8a03-3687dbb77021 | 0.0.1 | hfopenllm_v2/kyutai_helium-1-preview-2b/1762652580.3111548 | 1762652580.3111548 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kyutai/helium-1-preview-2b | kyutai/helium-1-preview-2b | kyutai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.26136096667952147}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "HeliumForCausalLM", "params_billions": 2.173} |
HF Open LLM v2 | Krystalan | Krystalan/DRT-o1-14B | dbd87f5e-e5ba-447b-8416-b6413c3dab09 | 0.0.1 | hfopenllm_v2/Krystalan_DRT-o1-14B/1762652579.70148 | 1762652579.7014809 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Krystalan/DRT-o1-14B | Krystalan/DRT-o1-14B | Krystalan | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4067662690549963}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | Krystalan | Krystalan/DRT-o1-7B | acb8e4cc-41b2-47ef-b819-d480189c618c | 0.0.1 | hfopenllm_v2/Krystalan_DRT-o1-7B/1762652579.701715 | 1762652579.701716 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Krystalan/DRT-o1-7B | Krystalan/DRT-o1-7B | Krystalan | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3928276971768242}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | xwen-team | xwen-team/Xwen-7B-Chat | a099778d-4c47-472e-872d-8fffcdf2764f | 0.0.1 | hfopenllm_v2/xwen-team_Xwen-7B-Chat/1762652580.602432 | 1762652580.602433 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | xwen-team/Xwen-7B-Chat | xwen-team/Xwen-7B-Chat | xwen-team | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6864098370102439}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Darkens-8B | a1689935-8ccb-49a8-8c2a-8dbf32b7ac02 | 0.0.1 | hfopenllm_v2/Delta-Vector_Darkens-8B/1762652579.5545971 | 1762652579.5545971 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Darkens-8B | Delta-Vector/Darkens-8B | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25476624245889795}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.414} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Control-8B-V1.1 | 20796a87-8691-44b9-9b60-85ad3c7f4b7b | 0.0.1 | hfopenllm_v2/Delta-Vector_Control-8B-V1.1/1762652579.5543838 | 1762652579.554385 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Control-8B-V1.1 | Delta-Vector/Control-8B-V1.1 | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5696562897556262}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Baldur-8B | 6267c5c6-abd3-4eb0-94ca-5c569414e7a9 | 0.0.1 | hfopenllm_v2/Delta-Vector_Baldur-8B/1762652579.5538838 | 1762652579.553885 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Baldur-8B | Delta-Vector/Baldur-8B | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.47818233398493776}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.0} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Odin-9B | 586d4e20-c1f4-466a-8488-07ac18ad6253 | 0.0.1 | hfopenllm_v2/Delta-Vector_Odin-9B/1762652579.555037 | 1762652579.555038 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Odin-9B | Delta-Vector/Odin-9B | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3691970637907419}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Control-8B | 26dc4843-56a7-45b5-a61a-386e260574a2 | 0.0.1 | hfopenllm_v2/Delta-Vector_Control-8B/1762652579.554166 | 1762652579.554166 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Control-8B | Delta-Vector/Control-8B | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5489733906035985}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Tor-8B | ce7e8e58-e323-4704-b6f3-7fa6c5c3b7f2 | 0.0.1 | hfopenllm_v2/Delta-Vector_Tor-8B/1762652579.555239 | 1762652579.55524 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Tor-8B | Delta-Vector/Tor-8B | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.23815476269631244}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 8.414} |
HF Open LLM v2 | Delta-Vector | Delta-Vector/Henbane-7b-attempt2 | 73f9a017-15ac-42e6-9600-69b411de4086 | 0.0.1 | hfopenllm_v2/Delta-Vector_Henbane-7b-attempt2/1762652579.55481 | 1762652579.55481 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Delta-Vector/Henbane-7b-attempt2 | Delta-Vector/Henbane-7b-attempt2 | Delta-Vector | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4157335868828043}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | berkeley-nest | berkeley-nest/Starling-LM-7B-alpha | ddc116b6-5b9a-409f-a0ab-09e5630d1289 | 0.0.1 | hfopenllm_v2/berkeley-nest_Starling-LM-7B-alpha/1762652580.030957 | 1762652580.0309582 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | berkeley-nest/Starling-LM-7B-alpha | berkeley-nest/Starling-LM-7B-alpha | berkeley-nest | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5480491761858536}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | talha2001 | talha2001/Beast-Soul-new | 01f536ff-7613-4b09-b793-1f51bf32f705 | 0.0.1 | hfopenllm_v2/talha2001_Beast-Soul-new/1762652580.5509062 | 1762652580.5509079 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | talha2001/Beast-Soul-new | talha2001/Beast-Soul-new | talha2001 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4853510906616666}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | schnapss | schnapss/testmerge-7b | faa7be96-1419-48be-9b95-e97689296de0 | 0.0.1 | hfopenllm_v2/schnapss_testmerge-7b/1762652580.509877 | 1762652580.509878 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | schnapss/testmerge-7b | schnapss/testmerge-7b | schnapss | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39222817679313116}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | HumanLLMs | HumanLLMs/Humanish-Qwen2.5-7B-Instruct | df720663-5e82-4de7-9a19-88287bb5f56a | 0.0.1 | hfopenllm_v2/HumanLLMs_Humanish-Qwen2.5-7B-Instruct/1762652579.645365 | 1762652579.645366 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HumanLLMs/Humanish-Qwen2.5-7B-Instruct | HumanLLMs/Humanish-Qwen2.5-7B-Instruct | HumanLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7284250233824031}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | HumanLLMs | HumanLLMs/Humanish-LLama3-8B-Instruct | e69e4e90-8177-44f5-8497-0a45ca9155ea | 0.0.1 | hfopenllm_v2/HumanLLMs_Humanish-LLama3-8B-Instruct/1762652579.6448839 | 1762652579.644885 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HumanLLMs/Humanish-LLama3-8B-Instruct | HumanLLMs/Humanish-LLama3-8B-Instruct | HumanLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6497903340913221}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | HumanLLMs | HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407 | de0dbc50-5d26-4005-967c-3dcbde3a1282 | 0.0.1 | hfopenllm_v2/HumanLLMs_Humanish-Mistral-Nemo-Instruct-2407/1762652579.6451478 | 1762652579.645149 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407 | HumanLLMs/Humanish-Mistral-Nemo-Instruct-2407 | HumanLLMs | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5451269298793867}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | rhplus0831 | rhplus0831/maid-yuzu-v7 | 65e47b2d-982b-4fa8-b5bf-e002cf3cc293 | 0.0.1 | hfopenllm_v2/rhplus0831_maid-yuzu-v7/1762652580.494505 | 1762652580.494506 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | rhplus0831/maid-yuzu-v7 | rhplus0831/maid-yuzu-v7 | rhplus0831 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6462430794735745}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | kaist-ai | kaist-ai/janus-rm-7b | 46f57920-759b-4d1a-b2f5-fe66aa740170 | 0.0.1 | hfopenllm_v2/kaist-ai_janus-rm-7b/1762652580.303882 | 1762652580.303883 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kaist-ai/janus-rm-7b | kaist-ai/janus-rm-7b | kaist-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.177804891022487}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LLMForSequenceRegression", "params_billions": 7.111} |
HF Open LLM v2 | kaist-ai | kaist-ai/janus-7b | 3ab8b78b-a9f9-428c-9469-afaa4158a0a6 | 0.0.1 | hfopenllm_v2/kaist-ai_janus-7b/1762652580.303385 | 1762652580.3033862 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kaist-ai/janus-7b | kaist-ai/janus-7b | kaist-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37751499355044615}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | kaist-ai | kaist-ai/janus-dpo-7b | 2a78f22b-d898-4f92-a2a5-c2930c16916c | 0.0.1 | hfopenllm_v2/kaist-ai_janus-dpo-7b/1762652580.303661 | 1762652580.303662 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kaist-ai/janus-dpo-7b | kaist-ai/janus-dpo-7b | kaist-ai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4002712802031942}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | nvidia | nvidia/AceInstruct-7B | d0680660-92e5-471b-a4c9-2658e7c59dd0 | 0.0.1 | hfopenllm_v2/nvidia_AceInstruct-7B/1762652580.412692 | 1762652580.412693 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/AceInstruct-7B | nvidia/AceInstruct-7B | nvidia | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5422290633297429}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | nvidia | nvidia/Hymba-1.5B-Instruct | ae6e9c29-eb12-4dd5-bdbc-e84b499cf40f | 0.0.1 | hfopenllm_v2/nvidia_Hymba-1.5B-Instruct/1762652580.414529 | 1762652580.41453 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/Hymba-1.5B-Instruct | nvidia/Hymba-1.5B-Instruct | nvidia | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6009055971488984}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "HymbaForCausalLM", "params_billions": 1.523} |
HF Open LLM v2 | nvidia | nvidia/Llama-3.1-Nemotron-70B-Instruct-HF | 2366b5e1-0a56-4d6e-83e6-12f12eca3ec4 | 0.0.1 | hfopenllm_v2/nvidia_Llama-3.1-Nemotron-70B-Instruct-HF/1762652580.415039 | 1762652580.41504 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/Llama-3.1-Nemotron-70B-Instruct-HF | nvidia/Llama-3.1-Nemotron-70B-Instruct-HF | nvidia | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7380672172059026}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | nvidia | nvidia/AceMath-1.5B-Instruct | 8584e2c5-dd32-4cd0-9089-1b4e17a1ffac | 0.0.1 | hfopenllm_v2/nvidia_AceMath-1.5B-Instruct/1762652580.412895 | 1762652580.412896 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nvidia/AceMath-1.5B-Instruct | nvidia/AceMath-1.5B-Instruct | nvidia | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32123654126606294}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
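Each row's `evaluation_results` field is a JSON-encoded list of per-benchmark records whose visible structure is `{"evaluation_name", "metric_config", "score_details": {"score": ...}}`. Below is a minimal sketch for extracting a named benchmark score from one such record, assuming the full (truncated) JSON continues this structure; the sample values are copied from a row above, and `get_score` is an illustrative helper, not part of any leaderboard API.

```python
import json

# Hypothetical sample reconstructed from the truncated `evaluation_results`
# fields shown in the rows above (values taken from nvidia/AceInstruct-7B).
sample_evaluation_results = json.loads("""
[{"evaluation_name": "IFEval",
  "metric_config": {"evaluation_description": "Accuracy on IFEval",
                    "lower_is_better": false,
                    "score_type": "continuous",
                    "min_score": 0,
                    "max_score": 1},
  "score_details": {"score": 0.5422290633297429}}]
""")

def get_score(results, evaluation_name):
    """Return the score for the named evaluation, or None if absent."""
    for entry in results:
        if entry.get("evaluation_name") == evaluation_name:
            return entry.get("score_details", {}).get("score")
    return None

print(get_score(sample_evaluation_results, "IFEval"))  # 0.5422290633297429
```

Because `lower_is_better` is part of each record's `metric_config`, any ranking code built on these rows should read that flag rather than assume higher scores are better.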