| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-RDPO | 985ac874-e7eb-4431-81c2-a79f3865c696 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-RDPO/1762652580.445683 | 1762652580.445684 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-RDPO | princeton-nlp/Llama-3-Base-8B-SFT-RDPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4480068440626427}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-RRHF | e93eff52-c6e1-474e-8089-f672000fe1e5 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-RRHF/1762652580.4487302 | 1762652580.448731 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-RRHF | princeton-nlp/Llama-3-Instruct-8B-RRHF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7274509412802475}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-CPO | 95eb37c8-2a58-45e3-bd86-2c305e3cb5dd | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-CPO/1762652580.4465249 | 1762652580.446526 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-CPO | princeton-nlp/Llama-3-Instruct-8B-CPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7292993701157373}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-RRHF | cc9fb769-3d0b-4e53-9942-d4f99203a629 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-RRHF/1762652580.445896 | 1762652580.445896 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-RRHF | princeton-nlp/Llama-3-Base-8B-SFT-RRHF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3357247658435174}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-KTO | e8602fbb-422c-464e-87f4-79c6e1a4afcf | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-KTO/1762652580.4474308 | 1762652580.447432 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-KTO | princeton-nlp/Llama-3-Instruct-8B-KTO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6864098370102439}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-RDPO | 9989efbb-bd01-4c7c-bf30-67fa81698906 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-RDPO/1762652580.452956 | 1762652580.452957 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-RDPO | princeton-nlp/Mistral-7B-Instruct-RDPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4887232542985944}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-SimPO-v0.2 | 5e499da1-f8c1-4830-828c-7d4013ea0243 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-SimPO-v0.2/1762652580.44994 | 1762652580.449941 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-SimPO-v0.2 | princeton-nlp/Llama-3-Instruct-8B-SimPO-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6808645505037745}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-RDPO | 034fa9fa-4103-428d-a50e-b117ef5e0726 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-RDPO/1762652580.451031 | 1762652580.4510322 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-RDPO | princeton-nlp/Mistral-7B-Base-SFT-RDPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46064663980460735}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-SLiC-HF | 8b5493df-86fd-495a-8dce-9c5398795fc9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-SLiC-HF/1762652580.453388 | 1762652580.4533892 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-SLiC-HF | princeton-nlp/Mistral-7B-Instruct-SLiC-HF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5115294086357531}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-ORPO | 36735132-1510-42cf-a68a-c46507f52edb | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-ORPO/1762652580.452744 | 1762652580.452745 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-ORPO | princeton-nlp/Mistral-7B-Instruct-ORPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4719621714827768}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-ORPO | ba821a1c-3b8e-4952-9f7b-b1f18923c4e7 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-ORPO/1762652580.445469 | 1762652580.4454699 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-ORPO | princeton-nlp/Llama-3-Base-8B-SFT-ORPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45165383404921167}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-KTO-v0.2 | 5f35c42b-2d34-42bc-b94e-127a678cad2c | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-KTO-v0.2/1762652580.447652 | 1762652580.447653 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-KTO-v0.2 | princeton-nlp/Llama-3-Instruct-8B-KTO-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7290245437660962}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-SimPO | a3d0b6ec-e2be-4ca5-b083-df3c7ea0b385 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-SimPO/1762652580.45361 | 1762652580.45361 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-SimPO | princeton-nlp/Mistral-7B-Instruct-SimPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4686897432146704}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-ORPO | 8789e9aa-5cfb-4eca-9795-540c5a9b4bb4 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-ORPO/1762652580.447865 | 1762652580.4478662 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-ORPO | princeton-nlp/Llama-3-Instruct-8B-ORPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.712813113649561}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-DPO | 133d7669-db7f-47b6-b838-51b9577a9e68 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-DPO/1762652580.450392 | 1762652580.4503932 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-DPO | princeton-nlp/Mistral-7B-Base-SFT-DPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44033830237104216}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-IPO | 71d5525f-c257-4b88-b84d-d75b3a8328fc | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-IPO/1762652580.444937 | 1762652580.444937 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-IPO | princeton-nlp/Llama-3-Base-8B-SFT-IPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4486562321307464}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-CPO | 342c7c0f-92f0-4296-8e0a-519724133bb5 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-CPO/1762652580.444415 | 1762652580.444416 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-CPO | princeton-nlp/Llama-3-Base-8B-SFT-CPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37034623687371726}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-DPO-v0.2 | 6ae028c9-19d9-447b-93c1-c4548aef84f9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-DPO-v0.2/1762652580.447217 | 1762652580.447217 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-DPO-v0.2 | princeton-nlp/Llama-3-Instruct-8B-DPO-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7208063493752133}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-KTO | 6c0d909f-ee4f-4e1a-8db9-abf1920359ed | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-KTO/1762652580.4452229 | 1762652580.445225 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-KTO | princeton-nlp/Llama-3-Base-8B-SFT-KTO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4522533544329047}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-RDPO-v0.2 | 1c3ea099-8b3b-4184-9f30-e7cdeea8f24e | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-RDPO-v0.2/1762652580.448503 | 1762652580.448504 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-RDPO-v0.2 | princeton-nlp/Llama-3-Instruct-8B-RDPO-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7076922565459647}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2 | 28bf3b2a-6c0c-4994-aaf5-80b67d82a955 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-ORPO-v0.2/1762652580.448072 | 1762652580.448073 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2 | princeton-nlp/Llama-3-Instruct-8B-ORPO-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7633213207622442}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-SimPO | 9bed5ccb-35c0-40e1-89b8-617656787052 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-SimPO/1762652580.4516768 | 1762652580.451678 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-SimPO | princeton-nlp/Mistral-7B-Base-SFT-SimPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.47006387496287627}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-KTO | a0048817-4f45-4bca-ac1a-b7e0c25bd7ab | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-KTO/1762652580.450817 | 1762652580.450818 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-KTO | princeton-nlp/Mistral-7B-Base-SFT-KTO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.478481540091402}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT | 494df3f9-7ce9-4f81-99c4-e6100d6e4187 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT/1762652580.444184 | 1762652580.444185 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT | princeton-nlp/Llama-3-Base-8B-SFT | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27959591661236627}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-SLiC-HF-v0.2 | 5a5746dd-0270-4151-b774-8eaa6860d5e0 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-SLiC-HF-v0.2/1762652580.4493709 | 1762652580.4493718 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-SLiC-HF-v0.2 | princeton-nlp/Llama-3-Instruct-8B-SLiC-HF-v0.2 | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7109646848140712}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-CPO | 259a0166-2ee3-409a-85ce-963d90d05ae7 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-CPO/1762652580.4518862 | 1762652580.4518871 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-CPO | princeton-nlp/Mistral-7B-Instruct-CPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4203047912871182}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-RDPO | 041d45dd-c371-4e9c-9cda-a63e3d7a1b2d | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-RDPO/1762652580.448289 | 1762652580.44829 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-RDPO | princeton-nlp/Llama-3-Instruct-8B-RDPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6660017642078574}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-SLiC-HF | 596f4d11-f091-42c3-9f1e-b95e0ba6dbd9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-SLiC-HF/1762652580.4460979 | 1762652580.446099 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-SLiC-HF | princeton-nlp/Llama-3-Base-8B-SFT-SLiC-HF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4890479483326463}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-DPO | 0df26c01-7fae-4254-8e97-e03c6078d861 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-DPO/1762652580.4521 | 1762652580.4521 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-DPO | princeton-nlp/Mistral-7B-Instruct-DPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.517624347841505}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-KTO | ff079687-4519-4f0b-bb1e-2b447cb2b4c9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-KTO/1762652580.452526 | 1762652580.452527 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-KTO | princeton-nlp/Mistral-7B-Instruct-KTO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4907966417993147}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-DPO | 8afa4f43-96fb-46b1-84e8-bf98928aa484 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-DPO/1762652580.444683 | 1762652580.444684 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-DPO | princeton-nlp/Llama-3-Base-8B-SFT-DPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.41111251479407973}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-RRHF | 0a5ce684-675e-4fbe-b141-df12903228f9 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-RRHF/1762652580.4531672 | 1762652580.4531682 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-RRHF | princeton-nlp/Mistral-7B-Instruct-RRHF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49601723427173233}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Base-8B-SFT-SimPO | 314cfcd7-674a-49d2-adf5-6d45c30e2382 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Base-8B-SFT-SimPO/1762652580.446312 | 1762652580.446312 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Base-8B-SFT-SimPO | princeton-nlp/Llama-3-Base-8B-SFT-SimPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4685401401614383}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Instruct-IPO | fed6b773-040e-409b-884e-a97a1abfedc0 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Instruct-IPO/1762652580.45231 | 1762652580.45231 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Instruct-IPO | princeton-nlp/Mistral-7B-Instruct-IPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4929198969844457}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Llama-3-Instruct-8B-DPO | 81c7a3df-7e92-4efa-a323-51ea3e0a4fa6 | 0.0.1 | hfopenllm_v2/princeton-nlp_Llama-3-Instruct-8B-DPO/1762652580.447003 | 1762652580.447003 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Llama-3-Instruct-8B-DPO | princeton-nlp/Llama-3-Instruct-8B-DPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6757436934001781}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-SLiC-HF | 2c28dcd3-af20-41ab-9234-a8296ecc98c0 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-SLiC-HF/1762652580.451465 | 1762652580.451466 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-SLiC-HF | princeton-nlp/Mistral-7B-Base-SFT-SLiC-HF | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5127284494031392}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/gemma-2-9b-it-DPO | 5ed0019b-dc1e-4dd8-82e5-2d4cdb28beb9 | 0.0.1 | hfopenllm_v2/princeton-nlp_gemma-2-9b-it-DPO/1762652580.454305 | 1762652580.4543061 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/gemma-2-9b-it-DPO | princeton-nlp/gemma-2-9b-it-DPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27687203287277756}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-CPO | 4c2ab1ed-8177-4518-ae3d-754f9711369d | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-CPO/1762652580.45017 | 1762652580.450171 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-CPO | princeton-nlp/Mistral-7B-Base-SFT-CPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.46549267055856236}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | princeton-nlp | princeton-nlp/Mistral-7B-Base-SFT-IPO | b402d383-b80e-4cd9-b2ec-a1e435f67ac5 | 0.0.1 | hfopenllm_v2/princeton-nlp_Mistral-7B-Base-SFT-IPO/1762652580.4506009 | 1762652580.450602 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | princeton-nlp/Mistral-7B-Base-SFT-IPO | princeton-nlp/Mistral-7B-Base-SFT-IPO | princeton-nlp | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48295300912689443}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | brgx53 | brgx53/3Blarenegv2-ECE-PRYMMAL-Martial | 64e92286-72ea-4318-aaea-4e0be87a0067 | 0.0.1 | hfopenllm_v2/brgx53_3Blarenegv2-ECE-PRYMMAL-Martial/1762652580.04124 | 1762652580.04124 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/3Blarenegv2-ECE-PRYMMAL-Martial | brgx53/3Blarenegv2-ECE-PRYMMAL-Martial | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5661843907498769}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | brgx53 | brgx53/3Blareneg-ECE-PRYMMAL-Martial | 6fea29aa-174f-4e3f-be91-c79842126c2c | 0.0.1 | hfopenllm_v2/brgx53_3Blareneg-ECE-PRYMMAL-Martial/1762652580.041033 | 1762652580.041034 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/3Blareneg-ECE-PRYMMAL-Martial | brgx53/3Blareneg-ECE-PRYMMAL-Martial | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28763902002242936}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | brgx53 | brgx53/Barracuda-PRYMMAL-ECE-TW3 | 70a11b76-f8e4-4cfb-8ab6-791c7e9ba113 | 0.0.1 | hfopenllm_v2/brgx53_Barracuda-PRYMMAL-ECE-TW3/1762652580.041505 | 1762652580.041506 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/Barracuda-PRYMMAL-ECE-TW3 | brgx53/Barracuda-PRYMMAL-ECE-TW3 | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16401592219754696}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | brgx53 | brgx53/3Bgeneral-ECE-PRYMMAL-Martial | 06d2ac1d-d70c-4cda-997d-9d4d1ef50c5a | 0.0.1 | hfopenllm_v2/brgx53_3Bgeneral-ECE-PRYMMAL-Martial/1762652580.040573 | 1762652580.0405738 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/3Bgeneral-ECE-PRYMMAL-Martial | brgx53/3Bgeneral-ECE-PRYMMAL-Martial | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32893057088525113}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | brgx53 | brgx53/3Bgeneralv2-ECE-PRYMMAL-Martial | c7f6603c-dcca-49b9-94bd-0a1fbf707dd9 | 0.0.1 | hfopenllm_v2/brgx53_3Bgeneralv2-ECE-PRYMMAL-Martial/1762652580.040823 | 1762652580.0408242 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/3Bgeneralv2-ECE-PRYMMAL-Martial | brgx53/3Bgeneralv2-ECE-PRYMMAL-Martial | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.567708125551315}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | brgx53 | brgx53/LaConfiance-PRYMMAL-ECE-TW3 | f4766bd8-0130-4ed1-ae1c-8177a65d94a9 | 0.0.1 | hfopenllm_v2/brgx53_LaConfiance-PRYMMAL-ECE-TW3/1762652580.041717 | 1762652580.041717 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | brgx53/LaConfiance-PRYMMAL-ECE-TW3 | brgx53/LaConfiance-PRYMMAL-ECE-TW3 | brgx53 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1579209829917951}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | Hastagaras | Hastagaras/L3.2-JametMini-3B-MK.III | cf208ef7-8a9b-4633-8161-dae0825c380e | 0.0.1 | hfopenllm_v2/Hastagaras_L3.2-JametMini-3B-MK.III/1762652579.6376362 | 1762652579.6376371 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Hastagaras/L3.2-JametMini-3B-MK.III | Hastagaras/L3.2-JametMini-3B-MK.III | Hastagaras | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6182662003484088}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | Intel | Intel/neural-chat-7b-v3-1 | 23b6bf8e-c79a-4620-9e15-2742f45130af | 0.0.1 | hfopenllm_v2/Intel_neural-chat-7b-v3-1/1762652579.6473012 | 1762652579.647302 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Intel/neural-chat-7b-v3-1 | Intel/neural-chat-7b-v3-1 | Intel | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4686897432146704}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | Intel | Intel/neural-chat-7b-v3-2 | f8842523-53de-4197-9cf4-979780cbe127 | 0.0.1 | hfopenllm_v2/Intel_neural-chat-7b-v3-2/1762652579.647583 | 1762652579.647584 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Intel/neural-chat-7b-v3-2 | Intel/neural-chat-7b-v3-2 | Intel | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4988397452093778}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | Intel | Intel/neural-chat-7b-v3-3 | 0bec0f9a-863b-42f5-96eb-7263eb1c8a61 | 0.0.1 | hfopenllm_v2/Intel_neural-chat-7b-v3-3/1762652579.6477928 | 1762652579.647794 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Intel/neural-chat-7b-v3-3 | Intel/neural-chat-7b-v3-3 | Intel | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4762585495374495}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | Intel | Intel/neural-chat-7b-v3 | 617dbd41-3ca3-46d8-8fd2-491d6be39554 | 0.0.1 | hfopenllm_v2/Intel_neural-chat-7b-v3/1762652579.646828 | 1762652579.6468291 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Intel/neural-chat-7b-v3 | Intel/neural-chat-7b-v3 | Intel | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27779735546128714}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct | ad99531d-4d52-4175-8ebd-cb172b4577de | 0.0.1 | hfopenllm_v2/VAGOsolutions_Llama-3-SauerkrautLM-8b-Instruct/1762652579.93995 | 1762652579.9399512 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct | VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.744536718130117}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-v2-14b-SFT | d1b47391-f36e-4819-8093-5aff774dff94 | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-v2-14b-SFT/1762652579.94341 | 1762652579.9434109 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-v2-14b-SFT | VAGOsolutions/SauerkrautLM-v2-14b-SFT | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6948529900663573}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct | b5db7846-f777-4fa8-86e9-f09fdee1dfee | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-Nemo-12b-Instruct/1762652579.942016 | 1762652579.942017 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct | VAGOsolutions/SauerkrautLM-Nemo-12b-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6112969144093228}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-v2-14b-DPO | e4b13fb1-11c0-4696-856f-de393fe2f8b2 | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-v2-14b-DPO/1762652579.943197 | 1762652579.943197 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-v2-14b-DPO | VAGOsolutions/SauerkrautLM-v2-14b-DPO | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7411645544931892}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct | f105fe57-632a-4e3b-bbcb-f063f2e10874 | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-Mixtral-8x7B-Instruct/1762652579.9418082 | 1762652579.941809 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct | VAGOsolutions/SauerkrautLM-Mixtral-8x7B-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5601891869129465}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-7b-HerO | be74b2d6-28b9-4227-b0ec-fbad4b7dada6 | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-7b-HerO/1762652579.940931 | 1762652579.940931 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-7b-HerO | VAGOsolutions/SauerkrautLM-7b-HerO | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.534610389322553}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-7b-LaserChat | 35512aeb-611a-46a8-849e-442fc3fcc23a | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-7b-LaserChat/1762652579.941142 | 1762652579.941143 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-7b-LaserChat | VAGOsolutions/SauerkrautLM-7b-LaserChat | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5987823419637672}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/Llama-3-SauerkrautLM-70b-Instruct | eb8adbdf-2cfb-4e9e-8f75-ce2734907725 | 0.0.1 | hfopenllm_v2/VAGOsolutions_Llama-3-SauerkrautLM-70b-Instruct/1762652579.939689 | 1762652579.939689 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/Llama-3-SauerkrautLM-70b-Instruct | VAGOsolutions/Llama-3-SauerkrautLM-70b-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8044621604010691}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-1.5b | 22ae39ae-883c-43a7-abbe-3213b9035b58 | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-1.5b/1762652579.940706 | 1762652579.940707 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-1.5b | VAGOsolutions/SauerkrautLM-1.5b | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24040324117785256}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct | 2e3eca4b-4c15-4b3b-8c44-3a23312a0797 | 0.0.1 | hfopenllm_v2/VAGOsolutions_Llama-3.1-SauerkrautLM-70b-Instruct/1762652579.940237 | 1762652579.940238 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct | VAGOsolutions/Llama-3.1-SauerkrautLM-70b-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8656365111238181}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/SauerkrautLM-SOLAR-Instruct | 24fbb409-3b1a-4ed2-8866-547a7f02c5dc | 0.0.1 | hfopenllm_v2/VAGOsolutions_SauerkrautLM-SOLAR-Instruct/1762652579.942544 | 1762652579.942544 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/SauerkrautLM-SOLAR-Instruct | VAGOsolutions/SauerkrautLM-SOLAR-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49172085621705963}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | VAGOsolutions | VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct | aa425d3e-e363-46bf-a5fb-cbf524657e85 | 0.0.1 | hfopenllm_v2/VAGOsolutions_Llama-3.1-SauerkrautLM-8b-Instruct/1762652579.9404852 | 1762652579.940486 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct | VAGOsolutions/Llama-3.1-SauerkrautLM-8b-Instruct | VAGOsolutions | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8017393848322452}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | mrdayl | mrdayl/OpenCognito | 049eb195-7ca8-42a7-bf2a-e072b7929958 | 0.0.1 | hfopenllm_v2/mrdayl_OpenCognito/1762652580.373594 | 1762652580.373594 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrdayl/OpenCognito | mrdayl/OpenCognito | mrdayl | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.40621661635571393}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | mrdayl | mrdayl/OpenThink | ae71ec28-7e22-42c4-8549-4334dff8a811 | 0.0.1 | hfopenllm_v2/mrdayl_OpenThink/1762652580.374203 | 1762652580.374204 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrdayl/OpenThink | mrdayl/OpenThink | mrdayl | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20540720842919008}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | mrdayl | mrdayl/OpenCognito-r1 | 91e89f4c-d05b-476a-a8d9-0186ef8d1418 | 0.0.1 | hfopenllm_v2/mrdayl_OpenCognito-r1/1762652580.3737972 | 1762652580.373798 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrdayl/OpenCognito-r1 | mrdayl/OpenCognito-r1 | mrdayl | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42412687225450696}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | mrdayl | mrdayl/OpenCogito | aacaba19-8c17-4d20-b27b-672810272ed4 | 0.0.1 | hfopenllm_v2/mrdayl_OpenCogito/1762652580.373355 | 1762652580.373356 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrdayl/OpenCogito | mrdayl/OpenCogito | mrdayl | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3933773498761065}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | mrdayl | mrdayl/OpenCognito-r2 | 672c6991-3c7b-48c3-9e95-389175e7cd6b | 0.0.1 | hfopenllm_v2/mrdayl_OpenCognito-r2/1762652580.373997 | 1762652580.3739982 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mrdayl/OpenCognito-r2 | mrdayl/OpenCognito-r2 | mrdayl | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3958751667797001}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | Undi95 | Undi95/MG-FinalMix-72B | 3d3598fa-4b23-4ec6-a010-fb20232a5121 | 0.0.1 | hfopenllm_v2/Undi95_MG-FinalMix-72B/1762652579.938925 | 1762652579.938925 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Undi95/MG-FinalMix-72B | Undi95/MG-FinalMix-72B | Undi95 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8013648231137825}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | Ahdoot | Ahdoot/Test_StealthThinker | 43c907eb-3e43-47ff-b38d-f912ba6ef46c | 0.0.1 | hfopenllm_v2/Ahdoot_Test_StealthThinker/1762652579.4775438 | 1762652579.4775438 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ahdoot/Test_StealthThinker | Ahdoot/Test_StealthThinker | Ahdoot | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42200361706937595}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | Ahdoot | Ahdoot/StructuredThinker-v0.3-MoreStructure | 81a5aafb-2cf7-490d-b619-ce638fcc8b38 | 0.0.1 | hfopenllm_v2/Ahdoot_StructuredThinker-v0.3-MoreStructure/1762652579.4772868 | 1762652579.477288 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Ahdoot/StructuredThinker-v0.3-MoreStructure | Ahdoot/StructuredThinker-v0.3-MoreStructure | Ahdoot | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4192808415005519}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.397} |
HF Open LLM v2 | vihangd | vihangd/smart-dan-sft-v0.1 | 00de0fac-e1a7-449a-969d-624cbe9adab1 | 0.0.1 | hfopenllm_v2/vihangd_smart-dan-sft-v0.1/1762652580.589078 | 1762652580.5890791 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | vihangd/smart-dan-sft-v0.1 | vihangd/smart-dan-sft-v0.1 | vihangd | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15764615664215392}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "4bit", "architecture": "LlamaForCausalLM", "params_billions": 0.379} |
HF Open LLM v2 | V3N0M | V3N0M/Jenna-Tiny-2.0 | d9785857-b164-4d38-8d03-0e03e2d0fbf5 | 0.0.1 | hfopenllm_v2/V3N0M_Jenna-Tiny-2.0/1762652579.9394162 | 1762652579.9394171 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | V3N0M/Jenna-Tiny-2.0 | V3N0M/Jenna-Tiny-2.0 | V3N0M | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2309361383351729}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 0.631} |
HF Open LLM v2 | ZHLiu627 | ZHLiu627/zephyr-7b-gemma-dpo-avg | 856a1f50-7ffb-4eb1-be4a-8aaa3cd6ee66 | 0.0.1 | hfopenllm_v2/ZHLiu627_zephyr-7b-gemma-dpo-avg/1762652579.9658082 | 1762652579.9658089 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ZHLiu627/zephyr-7b-gemma-dpo-avg | ZHLiu627/zephyr-7b-gemma-dpo-avg | ZHLiu627 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30899679517014855}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
HF Open LLM v2 | ModelCloud | ModelCloud/Llama-3.2-1B-Instruct-gptqmodel-4bit-vortex-v1 | 4a68c55f-ac3d-4173-a1cc-8bb97a2b8466 | 0.0.1 | hfopenllm_v2/ModelCloud_Llama-3.2-1B-Instruct-gptqmodel-4bit-vortex-v1/1762652579.761516 | 1762652579.761517 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ModelCloud/Llama-3.2-1B-Instruct-gptqmodel-4bit-vortex-v1 | ModelCloud/Llama-3.2-1B-Instruct-gptqmodel-4bit-vortex-v1 | ModelCloud | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5268919799465418}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 5.453} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v4-27b | 113ce0c6-c292-4924-adca-afdbcdd4c381 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v4-27b/1762652580.013432 | 1762652580.013433 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v4-27b | anthracite-org/magnum-v4-27b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34541682735142754}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 27.227} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v4-22b | 5e3f808c-964d-492d-a003-37594dd36f89 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v4-22b/1762652580.013223 | 1762652580.013224 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v4-22b | anthracite-org/magnum-v4-22b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5628620947973599}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 22.247} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v4-9b | 55401aa6-ad61-42d6-9163-5d105a9091bf | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v4-9b/1762652580.013639 | 1762652580.013639 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v4-9b | anthracite-org/magnum-v4-9b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3502628581053826}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v1-72b | 6d98f0fa-25c9-409b-b82e-b3c128bf47b6 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v1-72b/1762652580.0112262 | 1762652580.011227 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v1-72b | anthracite-org/magnum-v1-72b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7606484128778308}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v4-12b | c7ba8947-fd38-4ba1-9169-6c9164123273 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v4-12b/1762652580.013016 | 1762652580.013016 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v4-12b | anthracite-org/magnum-v4-12b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.33929640021808805}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v2.5-12b-kto | 74e67572-01d9-4890-9c5a-27b5559cf752 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v2.5-12b-kto/1762652580.011887 | 1762652580.011888 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v2.5-12b-kto | anthracite-org/magnum-v2.5-12b-kto | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3865576669902525}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v3-9b-chatml | 42df1809-0021-4968-a18b-86cefc0125d7 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v3-9b-chatml/1762652580.0125592 | 1762652580.0125592 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v3-9b-chatml | anthracite-org/magnum-v3-9b-chatml | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12747066671985885}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v2-12b | 72821a7d-cc27-4557-82d4-7e30286ea126 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v2-12b/1762652580.011473 | 1762652580.011474 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v2-12b | anthracite-org/magnum-v2-12b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.376166349729828}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v3-27b-kto | 9a74a1f1-0322-4f96-8e52-76bbde948fa9 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v3-27b-kto/1762652580.012144 | 1762652580.0121448 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v3-27b-kto | anthracite-org/magnum-v3-27b-kto | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5674831668860845}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 27.227} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v2-72b | 31d80ab1-348f-4b5a-963e-f027adf32101 | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v2-72b/1762652580.01168 | 1762652580.01168 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v2-72b | anthracite-org/magnum-v2-72b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7560273407891063}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | anthracite-org | anthracite-org/magnum-v3-34b | 8ace78d5-5390-49ec-935d-2c7faf7569ca | 0.0.1 | hfopenllm_v2/anthracite-org_magnum-v3-34b/1762652580.012352 | 1762652580.012352 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | anthracite-org/magnum-v3-34b | anthracite-org/magnum-v3-34b | anthracite-org | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5115294086357531}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | matouLeLoup | matouLeLoup/ECE-PRYMMAL-0.5B-FT-EnhancedMUSREnsembleV3 | 4800a6d0-8458-405a-95ca-6d0690a8f769 | 0.0.1 | hfopenllm_v2/matouLeLoup_ECE-PRYMMAL-0.5B-FT-EnhancedMUSREnsembleV3/1762652580.340896 | 1762652580.340897 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | matouLeLoup/ECE-PRYMMAL-0.5B-FT-EnhancedMUSREnsembleV3 | matouLeLoup/ECE-PRYMMAL-0.5B-FT-EnhancedMUSREnsembleV3 | matouLeLoup | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18732186154957736}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | matouLeLoup | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-ENSEMBLE-Mathis | b88d6df2-5642-4837-bf04-4d804a4ba3c4 | 0.0.1 | hfopenllm_v2/matouLeLoup_ECE-PRYMMAL-0.5B-FT-V4-MUSR-ENSEMBLE-Mathis/1762652580.341354 | 1762652580.341354 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-ENSEMBLE-Mathis | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-ENSEMBLE-Mathis | matouLeLoup | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18732186154957736}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | matouLeLoup | matouLeLoup/ECE-PRYMMAL-0.5B-FT-MUSR-ENSEMBLE-V2Mathis | 95c9ef47-8194-4c00-bbea-a65a7715f9f3 | 0.0.1 | hfopenllm_v2/matouLeLoup_ECE-PRYMMAL-0.5B-FT-MUSR-ENSEMBLE-V2Mathis/1762652580.3411388 | 1762652580.34114 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | matouLeLoup/ECE-PRYMMAL-0.5B-FT-MUSR-ENSEMBLE-V2Mathis | matouLeLoup/ECE-PRYMMAL-0.5B-FT-MUSR-ENSEMBLE-V2Mathis | matouLeLoup | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18732186154957736}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | matouLeLoup | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis | 8da1b04b-c3a8-4554-bcb5-0e08dcfd7483 | 0.0.1 | hfopenllm_v2/matouLeLoup_ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis/1762652580.3417778 | 1762652580.341779 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V5-MUSR-Mathis | matouLeLoup | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16521496296493304}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 0.63} |
HF Open LLM v2 | matouLeLoup | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-Mathis | 679f1499-572e-4f60-9b2d-4c8199d71107 | 0.0.1 | hfopenllm_v2/matouLeLoup_ECE-PRYMMAL-0.5B-FT-V4-MUSR-Mathis/1762652580.341564 | 1762652580.341565 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-Mathis | matouLeLoup/ECE-PRYMMAL-0.5B-FT-V4-MUSR-Mathis | matouLeLoup | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18824607596732226}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Wladastic | Wladastic/Mini-Think-Base-1B | 5f9a01b0-632a-4ee4-aedc-279002c7496c | 0.0.1 | hfopenllm_v2/Wladastic_Mini-Think-Base-1B/1762652579.951128 | 1762652579.9511292 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Wladastic/Mini-Think-Base-1B | Wladastic/Mini-Think-Base-1B | Wladastic | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5588405430923283}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | Mostafa8Mehrabi | Mostafa8Mehrabi/llama-3.2-1b-Insomnia-ChatBot-merged | 940d1360-047b-4c12-a7e5-cd002675c69c | 0.0.1 | hfopenllm_v2/Mostafa8Mehrabi_llama-3.2-1b-Insomnia-ChatBot-merged/1762652579.7624152 | 1762652579.7624161 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Mostafa8Mehrabi/llama-3.2-1b-Insomnia-ChatBot-merged | Mostafa8Mehrabi/llama-3.2-1b-Insomnia-ChatBot-merged | Mostafa8Mehrabi | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13206735905176042}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | ministral | ministral/Ministral-3b-instruct | 83b6f014-f8a0-4e69-ae60-cc3a7aeaaf1c | 0.0.1 | hfopenllm_v2/ministral_Ministral-3b-instruct/1762652580.360654 | 1762652580.360655 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ministral/Ministral-3b-instruct | ministral/Ministral-3b-instruct | ministral | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1357642167227401}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 3.316} |
HF Open LLM v2 | weathermanj | weathermanj/Menda-3b-Optim-100 | e33fb04e-ac99-423f-ac8c-5268e527bf34 | 0.0.1 | hfopenllm_v2/weathermanj_Menda-3b-Optim-100/1762652580.5935092 | 1762652580.59351 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | weathermanj/Menda-3b-Optim-100 | weathermanj/Menda-3b-Optim-100 | weathermanj | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6398234462337709}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | weathermanj | weathermanj/Menda-3b-Optim-200 | b8b84752-c112-47be-8a86-35ca0e578301 | 0.0.1 | hfopenllm_v2/weathermanj_Menda-3b-Optim-200/1762652580.5937102 | 1762652580.5937111 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | weathermanj/Menda-3b-Optim-200 | weathermanj/Menda-3b-Optim-200 | weathermanj | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6374752323834094}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | weathermanj | weathermanj/Menda-3B-500 | 468d60fa-5c01-41bd-a791-e0e86c2d02bf | 0.0.1 | hfopenllm_v2/weathermanj_Menda-3B-500/1762652580.593058 | 1762652580.593059 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | weathermanj/Menda-3B-500 | weathermanj/Menda-3B-500 | weathermanj | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6353021095138676}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | weathermanj | weathermanj/Menda-3b-750 | 9f1f8a2e-3a63-4b8e-85e9-141477fddcc3 | 0.0.1 | hfopenllm_v2/weathermanj_Menda-3b-750/1762652580.593308 | 1762652580.593309 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | weathermanj/Menda-3b-750 | weathermanj/Menda-3b-750 | weathermanj | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6335035483627884}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.086} |
HF Open LLM v2 | Novaciano | Novaciano/BLAST_PROCESSING-3.2-1B | 61173be4-9a87-4dfa-812d-b414b4d2bccb | 0.0.1 | hfopenllm_v2/Novaciano_BLAST_PROCESSING-3.2-1B/1762652579.794129 | 1762652579.7941298 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Novaciano/BLAST_PROCESSING-3.2-1B | Novaciano/BLAST_PROCESSING-3.2-1B | Novaciano | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3921783091087204}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.498} |
HF Open LLM v2 | Novaciano | Novaciano/HarmfulProject-3.2-1B | 99b31db9-55f8-41c2-9eb9-f21511deccf0 | 0.0.1 | hfopenllm_v2/Novaciano_HarmfulProject-3.2-1B/1762652579.7958348 | 1762652579.795836 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Novaciano/HarmfulProject-3.2-1B | Novaciano/HarmfulProject-3.2-1B | Novaciano | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3873821460391761}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.498} |
HF Open LLM v2 | Novaciano | Novaciano/Cerberus-3.2-1B | 2d6ff76b-9d81-45a7-8768-6a240b5395ab | 0.0.1 | hfopenllm_v2/Novaciano_Cerberus-3.2-1B/1762652579.7945569 | 1762652579.794559 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Novaciano/Cerberus-3.2-1B | Novaciano/Cerberus-3.2-1B | Novaciano | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5016877440746109}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
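Each row's `evaluation_results` field is a JSON list of per-benchmark records shaped like the (truncated) fragments above. Below is a minimal parsing sketch; the sample string is illustrative, reconstructed from the visible fragment shape rather than copied from a full row, and the helper name `scores_by_benchmark` is our own.

```python
import json

# Illustrative sample, assuming the `evaluation_results` field holds a JSON list
# of objects with `evaluation_name`, `metric_config`, and `score_details`, as in
# the truncated fragments shown in the rows above.
sample_results = json.dumps([
    {
        "evaluation_name": "IFEval",
        "metric_config": {
            "evaluation_description": "Accuracy on IFEval",
            "lower_is_better": False,
            "score_type": "continuous",
            "min_score": 0,
            "max_score": 1,
        },
        "score_details": {"score": 0.8656365111238181},
    }
])

def scores_by_benchmark(evaluation_results_json: str) -> dict[str, float]:
    """Map each evaluation name to its reported score."""
    results = json.loads(evaluation_results_json)
    return {r["evaluation_name"]: r["score_details"]["score"] for r in results}

print(scores_by_benchmark(sample_results))  # {'IFEval': 0.8656365111238181}
```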