| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | google | LenguajeNaturalAI/leniachat-gemma-2b-v0 | af954640-6806-4e4c-9c0b-b81215eadfc8 | 0.0.1 | hfopenllm_v2/LenguajeNaturalAI_leniachat-gemma-2b-v0/1762652579.7101068 | 1762652579.7101078 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | LenguajeNaturalAI/leniachat-gemma-2b-v0 | LenguajeNaturalAI/leniachat-gemma-2b-v0 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21497404664069114}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 2.506} |
HF Open LLM v2 | google | Supichi/BBAI_135_Gemma | 64cd00af-6782-431b-aac1-445e39d56717 | 0.0.1 | hfopenllm_v2/Supichi_BBAI_135_Gemma/1762652579.8946822 | 1762652579.894683 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Supichi/BBAI_135_Gemma | Supichi/BBAI_135_Gemma | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.06562144000141845}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 19.3} |
HF Open LLM v2 | google | dwikitheduck/gemma-2-2b-id-inst | 6d66b056-c83d-49b8-ac84-04396c0d97df | 0.0.1 | hfopenllm_v2/dwikitheduck_gemma-2-2b-id-inst/1762652580.137194 | 1762652580.137195 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dwikitheduck/gemma-2-2b-id-inst | dwikitheduck/gemma-2-2b-id-inst | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38785644312646006}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.0} |
HF Open LLM v2 | google | dwikitheduck/gemma-2-2b-id | 000b7f0b-9e2f-499a-9bab-b08767efb8ca | 0.0.1 | hfopenllm_v2/dwikitheduck_gemma-2-2b-id/1762652580.136933 | 1762652580.136933 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | dwikitheduck/gemma-2-2b-id | dwikitheduck/gemma-2-2b-id | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38785644312646006}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.0} |
HF Open LLM v2 | google | lkoenig/BBAI_200_Gemma | b71c5ede-010d-4ce4-9f12-552388e2d9eb | 0.0.1 | hfopenllm_v2/lkoenig_BBAI_200_Gemma/1762652580.32272 | 1762652580.32272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | lkoenig/BBAI_200_Gemma | lkoenig/BBAI_200_Gemma | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.07051733843978422}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 19.3} |
HF Open LLM v2 | google | bunnycore/Gemma2-9B-TitanFusion | 95a2d032-e2a4-46df-84d2-6b7529d5bb01 | 0.0.1 | hfopenllm_v2/bunnycore_Gemma2-9B-TitanFusion/1762652580.044988 | 1762652580.0449889 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Gemma2-9B-TitanFusion | bunnycore/Gemma2-9B-TitanFusion | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16184169115724056}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | bunnycore/Gemma-2-2B-Smart | ebada07f-e700-4f38-aec0-f801959969e6 | 0.0.1 | hfopenllm_v2/bunnycore_Gemma-2-2B-Smart/1762652580.044707 | 1762652580.044708 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | bunnycore/Gemma-2-2B-Smart | bunnycore/Gemma-2-2B-Smart | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.13206625088099574}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.614} |
HF Open LLM v2 | google | BlackBeenie/Neos-Gemma-2-9b | ea9ebbaa-fb04-491d-adc2-0389cb5d1ef6 | 0.0.1 | hfopenllm_v2/BlackBeenie_Neos-Gemma-2-9b/1762652579.4958751 | 1762652579.495876 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | BlackBeenie/Neos-Gemma-2-9b | BlackBeenie/Neos-Gemma-2-9b | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5875665456544192}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | google | IlyaGusev/gemma-2-9b-it-abliterated | 8a81c9e6-1c72-46f6-98c6-0d3b28ba5633 | 0.0.1 | hfopenllm_v2/IlyaGusev_gemma-2-9b-it-abliterated/1762652579.646349 | 1762652579.6463501 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | IlyaGusev/gemma-2-9b-it-abliterated | IlyaGusev/gemma-2-9b-it-abliterated | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.747259493698941}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 9.242} |
HF Open LLM v2 | google | IlyaGusev/gemma-2-2b-it-abliterated | e3ee4f00-1037-4da7-96e2-934b5ccefd15 | 0.0.1 | hfopenllm_v2/IlyaGusev_gemma-2-2b-it-abliterated/1762652579.646105 | 1762652579.646106 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | IlyaGusev/gemma-2-2b-it-abliterated | IlyaGusev/gemma-2-2b-it-abliterated | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.533086654521115}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.614} |
HF Open LLM v2 | google | djuna/Gemma-2-gemmama-9b | b2f24392-29aa-4a24-b489-87ea9b85daea | 0.0.1 | hfopenllm_v2/djuna_Gemma-2-gemmama-9b/1762652580.12782 | 1762652580.127821 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | djuna/Gemma-2-gemmama-9b | djuna/Gemma-2-gemmama-9b | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7703404743857409}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | Columbia-NLP/LION-Gemma-2b-odpo-v1.0 | 25418041-6fe1-4cd8-88cb-79456a65210c | 0.0.1 | hfopenllm_v2/Columbia-NLP_LION-Gemma-2b-odpo-v1.0/1762652579.507273 | 1762652579.507273 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Columbia-NLP/LION-Gemma-2b-odpo-v1.0 | Columbia-NLP/LION-Gemma-2b-odpo-v1.0 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30664858131978706}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 2.506} |
HF Open LLM v2 | google | noname0202/gemma-2-2b-it-ties | 42bed40b-ac71-42c8-b56b-47d1f930c736 | 0.0.1 | hfopenllm_v2/noname0202_gemma-2-2b-it-ties/1762652580.4097438 | 1762652580.409745 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | noname0202/gemma-2-2b-it-ties | noname0202/gemma-2-2b-it-ties | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.12657083205893696}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 2.614} |
HF Open LLM v2 | google | zelk12/MT-Gen2-gemma-2-9B | 6f5cbf98-67b4-4651-acee-160fe2e36f59 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen2-gemma-2-9B/1762652580.613527 | 1762652580.613528 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen2-gemma-2-9B | zelk12/MT-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7907485471881275}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/Gemma-2-TM-9B | 4d3c877e-3dea-44af-8133-d555355971f8 | 0.0.1 | hfopenllm_v2/zelk12_Gemma-2-TM-9B/1762652580.612811 | 1762652580.612811 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/Gemma-2-TM-9B | zelk12/Gemma-2-TM-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8044621604010691}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Gen4-gemma-2-9B | b38dc953-12fb-41aa-a887-d9a30ff1799a | 0.0.1 | hfopenllm_v2/zelk12_MT4-Gen4-gemma-2-9B/1762652580.6246998 | 1762652580.624701 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Gen4-gemma-2-9B | zelk12/MT4-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7874262512356104}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75 | a2b9a953-31e2-4a6f-8005-993e1133246e | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75/1762652580.630381 | 1762652580.630382 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75 | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.75 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7208063493752133}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Gen4-gemma-2-9B | 6cbd7c31-df0a-4920-9c23-be53f107698e | 0.0.1 | hfopenllm_v2/zelk12_MT5-Gen4-gemma-2-9B/1762652580.62615 | 1762652580.6261508 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Gen4-gemma-2-9B | zelk12/MT5-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7834545672149895}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge2-MU-gemma-2-MTg2MT1g2-9B | b149c82e-0099-46f6-a302-0eac4127f418 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge2-MU-gemma-2-MTg2MT1g2-9B/1762652580.615718 | 1762652580.615718 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge2-MU-gemma-2-MTg2MT1g2-9B | zelk12/MT-Merge2-MU-gemma-2-MTg2MT1g2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7955945779420825}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen7-gemma-2-9B | 4b9e66cf-0ddb-4878-8800-2bc05dec750a | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen7-gemma-2-9B/1762652580.621203 | 1762652580.621205 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen7-gemma-2-9B | zelk12/MT2-Gen7-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17615482475387528}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Max-Merge_02012025163610-gemma-2-9B | 6737b327-bd1c-4eee-a461-af685edcd7b5 | 0.0.1 | hfopenllm_v2/zelk12_MT5-Max-Merge_02012025163610-gemma-2-9B/1762652580.62657 | 1762652580.62657 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT5-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17615482475387528}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Gen2-gemma-2-9B | d59d00da-e88f-4d1a-9c47-538020ae0114 | 0.0.1 | hfopenllm_v2/zelk12_MT5-Gen2-gemma-2-9B/1762652580.625738 | 1762652580.625739 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Gen2-gemma-2-9B | zelk12/MT5-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7962439660101863}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/Test01012025155054t0.5_gemma-2 | 73f07833-1d35-484f-8fe3-57f4c27e1277 | 0.0.1 | hfopenllm_v2/zelk12_Test01012025155054t0.5_gemma-2/1762652580.628514 | 1762652580.628514 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/Test01012025155054t0.5_gemma-2 | zelk12/Test01012025155054t0.5_gemma-2 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1555229014570229}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 3.817} |
HF Open LLM v2 | google | zelk12/MT1-Gen4-gemma-2-9B | e10f8a93-7131-446d-b792-d179f522a262 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen4-gemma-2-9B/1762652580.617781 | 1762652580.617782 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen4-gemma-2-9B | zelk12/MT1-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7941207108250552}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-gemma-2-9B | dd306da8-60aa-4022-8d04-1942fd19bc0b | 0.0.1 | hfopenllm_v2/zelk12_MT5-gemma-2-9B/1762652580.6267788 | 1762652580.6267798 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-gemma-2-9B | zelk12/MT5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8047868544351211}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Gutenberg-Doppel-9B-v0.1 | 774a3b0c-acae-4ad2-a2a6-42c30e1db7c0 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Gutenberg-Doppel-9B-v0.1/1762652580.630864 | 1762652580.6308649 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Gutenberg-Doppel-9B-v0.1 | zelk12/recoilme-gemma-2-Gutenberg-Doppel-9B-v0.1 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7615227596111651}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen2-GI-gemma-2-9B | 0cf7e394-67e2-4ca3-ab2e-00cd4165eaf8 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen2-GI-gemma-2-9B/1762652580.613308 | 1762652580.613309 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen2-GI-gemma-2-9B | zelk12/MT-Gen2-GI-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7913979352562313}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen5-IF-gemma-2-S2DMv1-9B | 182a7558-c9f7-43a6-a928-d5d97e082a91 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen5-IF-gemma-2-S2DMv1-9B/1762652580.617982 | 1762652580.6179829 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen5-IF-gemma-2-S2DMv1-9B | zelk12/MT1-Gen5-IF-gemma-2-S2DMv1-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7929216700576691}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge3-gemma-2-9B | c2bad77e-c0d0-4a43-8853-9363cc618603 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge3-gemma-2-9B/1762652580.6161401 | 1762652580.616141 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge3-gemma-2-9B | zelk12/MT-Merge3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7858526487497617}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Gen5-gemma-2-9B | b4ca4df6-2631-4ba3-bb55-8eadec5dd348 | 0.0.1 | hfopenllm_v2/zelk12_MT5-Gen5-gemma-2-9B/1762652580.6263602 | 1762652580.6263611 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Gen5-gemma-2-9B | zelk12/MT5-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7947202312087482}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge6-gemma-2-9B | 3c796c74-d79c-4c9f-a5ab-dee6c237bde1 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge6-gemma-2-9B/1762652580.6167512 | 1762652580.6167512 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge6-gemma-2-9B | zelk12/MT-Merge6-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16946036516443036}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen1-gemma-2-9B | b869eab0-f736-48ef-8870-b98636cc4da1 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen1-gemma-2-9B/1762652580.617173 | 1762652580.617174 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen1-gemma-2-9B | zelk12/MT1-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7974430067775724}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-gemma-2-9B | 0644b140-506f-4c7a-ba59-50ab48fad799 | 0.0.1 | hfopenllm_v2/zelk12_MT2-gemma-2-9B/1762652580.6217349 | 1762652580.621736 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-gemma-2-9B | zelk12/MT2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7885754243185858}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Max-Merge_02012025163610-gemma-2-9B | bfeb5972-e865-4892-b01b-0c92fdab79e9 | 0.0.1 | hfopenllm_v2/zelk12_MT-Max-Merge_02012025163610-gemma-2-9B/1762652580.6150799 | 1762652580.615081 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7907485471881275}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen5-gemma-2-9B_v1 | 95fe9cce-c93d-47e3-a053-defe922abefa | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen5-gemma-2-9B_v1/1762652580.623179 | 1762652580.623179 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen5-gemma-2-9B_v1 | zelk12/MT3-Gen5-gemma-2-9B_v1 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7996161296471141}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/Rv0.4DMv1t0.25-gemma-2-9B | 522e1145-3f25-4b5d-9b6a-7ad0047b2da5 | 0.0.1 | hfopenllm_v2/zelk12_Rv0.4DMv1t0.25-gemma-2-9B/1762652580.627404 | 1762652580.627404 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/Rv0.4DMv1t0.25-gemma-2-9B | zelk12/Rv0.4DMv1t0.25-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7496575752337131}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MTMaMe-Merge_02012025163610-gemma-2-9B | b1a8ede3-2f27-4825-a413-e1772743b7c6 | 0.0.1 | hfopenllm_v2/zelk12_MTMaMe-Merge_02012025163610-gemma-2-9B/1762652580.627192 | 1762652580.627192 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MTMaMe-Merge_02012025163610-gemma-2-9B | zelk12/MTMaMe-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17860277397305815}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1 | 6850eb56-9f2c-4d4f-a82a-29e24b81b8b3 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Ataraxy-9B-v0.1/1762652580.628911 | 1762652580.6289122 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1 | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7648949232480928}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-gemma-2-9B | 0b8f178b-9980-4250-bc82-66facb367eb8 | 0.0.1 | hfopenllm_v2/zelk12_MT3-gemma-2-9B/1762652580.623819 | 1762652580.62382 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-gemma-2-9B | zelk12/MT3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7786085364610345}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/Rv0.4MT4g2-gemma-2-9B | 7e232332-cf13-4127-be18-1311921931e6 | 0.0.1 | hfopenllm_v2/zelk12_Rv0.4MT4g2-gemma-2-9B/1762652580.627839 | 1762652580.62784 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/Rv0.4MT4g2-gemma-2-9B | zelk12/Rv0.4MT4g2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7320221456845614}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Gen5-gemma-2-9B | 4a35f213-f9b7-40c5-b164-722f6b4ee933 | 0.0.1 | hfopenllm_v2/zelk12_MT4-Gen5-gemma-2-9B/1762652580.6249092 | 1762652580.62491 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Gen5-gemma-2-9B | zelk12/MT4-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7788833628106757}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen4-gemma-2-9B | 7442a4c1-e225-4cea-b107-2d975460e214 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen4-gemma-2-9B/1762652580.613958 | 1762652580.6139588 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen4-gemma-2-9B | zelk12/MT-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7883005979689446}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen3-gemma-2-9B | 79319862-c5eb-40a1-9424-ecc3835c1c9e | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen3-gemma-2-9B/1762652580.613742 | 1762652580.613743 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen3-gemma-2-9B | zelk12/MT-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8020142111818863}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen1-gemma-2-9B | 1964f25a-d5b2-467a-a30d-9338082bdcfb | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen1-gemma-2-9B/1762652580.6219652 | 1762652580.6219661 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen1-gemma-2-9B | zelk12/MT3-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7837792612490415}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Gen3-gemma-2-9B | b84ca7e1-4746-449a-841f-fcfd71774104 | 0.0.1 | hfopenllm_v2/zelk12_MT4-Gen3-gemma-2-9B/1762652580.624489 | 1762652580.62449 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Gen3-gemma-2-9B | zelk12/MT4-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7840540875986826}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen5-gemma-2-9B | 3f7eb2b4-8dfb-4bf5-a462-0c11ccbae935 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen5-gemma-2-9B/1762652580.6205592 | 1762652580.6205592 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen5-gemma-2-9B | zelk12/MT2-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7749116787900548}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Max-Merge_02012025163610-gemma-2-9B | 2144960d-f674-45bd-9509-3cf711dc697b | 0.0.1 | hfopenllm_v2/zelk12_MT2-Max-Merge_02012025163610-gemma-2-9B/1762652580.6214652 | 1762652580.6214678 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT2-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7901490268044344}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen4-gemma-2-9B | eb55e4d5-dde4-4349-b8aa-9297604cedf0 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen4-gemma-2-9B/1762652580.620331 | 1762652580.620331 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen4-gemma-2-9B | zelk12/MT2-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7895993741051521}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen2-gemma-2-9B | 55315256-9b4d-4dbd-bc53-7ec384e0fdca | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen2-gemma-2-9B/1762652580.622196 | 1762652580.622197 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen2-gemma-2-9B | zelk12/MT3-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7843289139483238}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Ifable-9B-v0.1 | e8502d8d-87bd-444c-b41b-7f8d4eb15b29 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Ifable-9B-v0.1/1762652580.6310751 | 1762652580.631076 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Ifable-9B-v0.1 | zelk12/recoilme-gemma-2-Ifable-9B-v0.1 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7943955371746965}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-gemma-2-9B | 17cda965-9f4b-411c-977f-1fe3238f527f | 0.0.1 | hfopenllm_v2/zelk12_MT1-gemma-2-9B/1762652580.619083 | 1762652580.6190841 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-gemma-2-9B | zelk12/MT1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7946703635243377}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Max-Merge_02012025163610-gemma-2-9B | 42e21a24-7c3c-4e65-ad6e-0b18f6c048eb | 0.0.1 | hfopenllm_v2/zelk12_MT3-Max-Merge_02012025163610-gemma-2-9B/1762652580.623601 | 1762652580.623602 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT3-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17615482475387528}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge-gemma-2-9B | 8025c7ed-3553-489f-8858-091d1ff81a15 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge-gemma-2-9B/1762652580.615297 | 1762652580.615297 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge-gemma-2-9B | zelk12/MT-Merge-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8035379459833243}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Gen1-gemma-2-9B | b311d3f4-6eda-4053-91d2-416c4d796c6d | 0.0.1 | hfopenllm_v2/zelk12_MT5-Gen1-gemma-2-9B/1762652580.625538 | 1762652580.625539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Gen1-gemma-2-9B | zelk12/MT5-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7831298731809377}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-gemma-2-9B | a312ee46-fd2f-4a0d-a778-7e235910a147 | 0.0.1 | hfopenllm_v2/zelk12_MT4-gemma-2-9B/1762652580.62533 | 1762652580.625331 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-gemma-2-9B | zelk12/MT4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7761605872418517}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Max-Merge_02012025163610-gemma-2-9B | 01fcc284-cedc-48b7-bc21-b8ec6dd53d3c | 0.0.1 | hfopenllm_v2/zelk12_MT1-Max-Merge_02012025163610-gemma-2-9B/1762652580.618859 | 1762652580.61886 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT1-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7928718023732585}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen3-gemma-2-9B | 1aa85069-5409-4c32-91d5-1f417be4e465 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen3-gemma-2-9B/1762652580.620111 | 1762652580.620112 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen3-gemma-2-9B | zelk12/MT2-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7810066179958066}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen5-gemma-2-9B | 46f2caf1-29e8-4173-b2b2-e54e905e71d9 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen5-gemma-2-9B/1762652580.618199 | 1762652580.6182 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen5-gemma-2-9B | zelk12/MT1-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7794828831943688}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-psy10k-mental_healt-9B-v0.1 | 735bed66-1e83-4647-b730-14f0d571d597 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-psy10k-mental_healt-9B-v0.1/1762652580.631496 | 1762652580.631499 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-psy10k-mental_healt-9B-v0.1 | zelk12/recoilme-gemma-2-psy10k-mental_healt-9B-v0.1 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.744536718130117}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen6-gemma-2-9B | 2dc22f82-e2fb-4690-b8e6-8c77b9bc9c45 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen6-gemma-2-9B/1762652580.614364 | 1762652580.6143649 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen6-gemma-2-9B | zelk12/MT-Gen6-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1615668648075994}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen2-gemma-2-9B | 2871c1f6-4010-48e4-8020-1c5024474934 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen2-gemma-2-9B/1762652580.617375 | 1762652580.617376 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen2-gemma-2-9B | zelk12/MT1-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7983672211953173}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/gemma-2-S2MTM-9B | e0eb1bbf-923b-4bee-8390-288c21607e0e | 0.0.1 | hfopenllm_v2/zelk12_gemma-2-S2MTM-9B/1762652580.628712 | 1762652580.628713 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/gemma-2-S2MTM-9B | zelk12/gemma-2-S2MTM-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7822555264476034}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen3-gemma-2-9B | 69b008dd-f8ad-49ce-9bca-fff2e2ce6b72 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen3-gemma-2-9B/1762652580.617578 | 1762652580.617579 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen3-gemma-2-9B | zelk12/MT1-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.795969139660545}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen6-gemma-2-9B | 35e1f76a-96d6-42af-a51b-b1b453536723 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen6-gemma-2-9B/1762652580.620769 | 1762652580.620769 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen6-gemma-2-9B | zelk12/MT2-Gen6-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16641289556155447}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen4-gemma-2-9B | 96b38b17-8c70-4ecf-beb5-8e6ed84942ac | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen4-gemma-2-9B/1762652580.6226869 | 1762652580.622689 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen4-gemma-2-9B | zelk12/MT3-Gen4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7737126380226687}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Gen2-gemma-2-9B | e7f0b28a-32c6-4faf-9cb4-c2ee4a075135 | 0.0.1 | hfopenllm_v2/zelk12_MT4-Gen2-gemma-2-9B/1762652580.6242292 | 1762652580.62423 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Gen2-gemma-2-9B | zelk12/MT4-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8050616807847621}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Max-Merge_02012025163610-gemma-2-9B | ae4224f6-36e8-48e2-a0bf-a79299c365ad | 0.0.1 | hfopenllm_v2/zelk12_MT4-Max-Merge_02012025163610-gemma-2-9B/1762652580.625107 | 1762652580.625107 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Max-Merge_02012025163610-gemma-2-9B | zelk12/MT4-Max-Merge_02012025163610-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1770790391716202}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen7-gemma-2-9B | 29e65163-3e59-4bfe-a950-60092cb3171f | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen7-gemma-2-9B/1762652580.614857 | 1762652580.614858 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen7-gemma-2-9B | zelk12/MT-Gen7-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16641289556155447}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT4-Gen1-gemma-2-9B | 6e5b6be6-cc1d-4a03-8e5e-eeede4ee4298 | 0.0.1 | hfopenllm_v2/zelk12_MT4-Gen1-gemma-2-9B/1762652580.624031 | 1762652580.624032 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT4-Gen1-gemma-2-9B | zelk12/MT4-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7894996387363307}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen2-gemma-2-9B | 556a83e2-9b7c-432e-99d5-804da880dfc6 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen2-gemma-2-9B/1762652580.6198761 | 1762652580.619877 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen2-gemma-2-9B | zelk12/MT2-Gen2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7889001183526376}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge4-gemma-2-9B | 7b515db9-e76c-495f-b4f8-a65b913f40e9 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge4-gemma-2-9B/1762652580.616342 | 1762652580.616342 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge4-gemma-2-9B | zelk12/MT-Merge4-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7807317916461656}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25 | b9ce6ed3-132a-44ed-9efc-dbfcc83d6799 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25/1762652580.630025 | 1762652580.630029 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25 | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.1-t0.25 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7706651684197928}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge1-gemma-2-9B | 0e6d9dcd-e9b7-4638-ac0a-d0600fbb27d8 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge1-gemma-2-9B/1762652580.615506 | 1762652580.615506 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge1-gemma-2-9B | zelk12/MT-Merge1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7901490268044344}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MTM-Merge-gemma-2-9B | e0354dac-3ad8-4342-92a9-be0182051cac | 0.0.1 | hfopenllm_v2/zelk12_MTM-Merge-gemma-2-9B/1762652580.626984 | 1762652580.626985 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MTM-Merge-gemma-2-9B | zelk12/MTM-Merge-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7798075772284205}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/Rv0.4DMv1t0.25Tt0.25-gemma-2-9B | 64790745-5edc-49d9-8111-822d54518b58 | 0.0.1 | hfopenllm_v2/zelk12_Rv0.4DMv1t0.25Tt0.25-gemma-2-9B/1762652580.627618 | 1762652580.627619 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/Rv0.4DMv1t0.25Tt0.25-gemma-2-9B | zelk12/Rv0.4DMv1t0.25Tt0.25-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7646200968984517}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen6-gemma-2-9B | fcf4087e-9d89-4e8a-a817-6c9092445208 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen6-gemma-2-9B/1762652580.618452 | 1762652580.618453 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen6-gemma-2-9B | zelk12/MT1-Gen6-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16336542595867853}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen6fix-gemma-2-9B | 0c2ec793-573d-4fb5-abc3-4aef4a8e2e72 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen6fix-gemma-2-9B/1762652580.614617 | 1762652580.614618 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen6fix-gemma-2-9B | zelk12/MT-Gen6fix-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15759518078697854}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen1-gemma-2-9B | 119f453d-714d-4324-aac5-8448bab91771 | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen1-gemma-2-9B/1762652580.613055 | 1762652580.613056 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen1-gemma-2-9B | zelk12/MT-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7886252920029965}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen6-gemma-2-9B | 9f093c1a-eabc-4ee3-9e43-9ac0bc3afa08 | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen6-gemma-2-9B/1762652580.623395 | 1762652580.623395 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen6-gemma-2-9B | zelk12/MT3-Gen6-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17615482475387528}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT1-Gen7-gemma-2-9B | 5b8bdeea-19cf-41c0-890a-55ae1b740e75 | 0.0.1 | hfopenllm_v2/zelk12_MT1-Gen7-gemma-2-9B/1762652580.6186602 | 1762652580.6186612 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT1-Gen7-gemma-2-9B | zelk12/MT1-Gen7-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16336542595867853}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-gemma-2-9B | 061fc038-b3fd-4d5b-8ab7-7f3713ad9e55 | 0.0.1 | hfopenllm_v2/zelk12_MT-gemma-2-9B/1762652580.616956 | 1762652580.616957 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-gemma-2-9B | zelk12/MT-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7968434863938794}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.2 | 7f429355-b60b-4298-8eb0-a072a80898d7 | 0.0.1 | hfopenllm_v2/zelk12_recoilme-gemma-2-Ataraxy-9B-v0.2/1762652580.6306539 | 1762652580.6306539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.2 | zelk12/recoilme-gemma-2-Ataraxy-9B-v0.2 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.759999024809727}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Gen5-gemma-2-9B | 4431b126-a8b8-4776-8dd5-448ec4fb0caf | 0.0.1 | hfopenllm_v2/zelk12_MT-Gen5-gemma-2-9B/1762652580.614163 | 1762652580.614163 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Gen5-gemma-2-9B | zelk12/MT-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7923221496739761}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen5-gemma-2-9B | 53dc50c8-fa89-4d31-92d6-f8b02543e272 | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen5-gemma-2-9B/1762652580.622956 | 1762652580.622956 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen5-gemma-2-9B | zelk12/MT3-Gen5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7990166092634211}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/T31122024203920-gemma-2-9B | f1312aef-339c-487a-b0fa-1bf4a77f0910 | 0.0.1 | hfopenllm_v2/zelk12_T31122024203920-gemma-2-9B/1762652580.628056 | 1762652580.628057 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/T31122024203920-gemma-2-9B | zelk12/T31122024203920-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7676176988169169}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT3-Gen3-gemma-2-9B | 71710546-99cb-4180-9454-1e77696fccf3 | 0.0.1 | hfopenllm_v2/zelk12_MT3-Gen3-gemma-2-9B/1762652580.622438 | 1762652580.622439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT3-Gen3-gemma-2-9B | zelk12/MT3-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7856276900845313}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT2-Gen1-gemma-2-9B | e6c0f96c-6189-4ed1-bf68-e762249170e7 | 0.0.1 | hfopenllm_v2/zelk12_MT2-Gen1-gemma-2-9B/1762652580.619495 | 1762652580.619499 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT2-Gen1-gemma-2-9B | zelk12/MT2-Gen1-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7855778224001206}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge2-gemma-2-9B | 75c81dae-2bb9-4d60-94e2-61141c31ccbd | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge2-gemma-2-9B/1762652580.615932 | 1762652580.615933 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge2-gemma-2-9B | zelk12/MT-Merge2-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7877010775852515}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT-Merge5-gemma-2-9B | f9e1d208-d1ab-4518-9b1b-1470af8bef12 | 0.0.1 | hfopenllm_v2/zelk12_MT-Merge5-gemma-2-9B/1762652580.616543 | 1762652580.616544 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT-Merge5-gemma-2-9B | zelk12/MT-Merge5-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7843787816327346}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | zelk12/MT5-Gen3-gemma-2-9B | 1ff959c7-3477-40e5-8460-971337adc788 | 0.0.1 | hfopenllm_v2/zelk12_MT5-Gen3-gemma-2-9B/1762652580.625941 | 1762652580.625942 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | zelk12/MT5-Gen3-gemma-2-9B | zelk12/MT5-Gen3-gemma-2-9B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7825303527972447}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForCausalLM", "params_billions": 10.159} |
HF Open LLM v2 | google | google/mt5-base | 621fb00c-90a0-4295-9bd6-f5e102bc0bab | 0.0.1 | hfopenllm_v2/google_mt5-base/1762652580.178463 | 1762652580.178463 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | google/mt5-base | google/mt5-base | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1645157072124186}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MT5ForConditionalGeneration", "params_billions": 0.39} |
HF Open LLM v2 | google | Skywork/Skywork-Reward-Gemma-2-27B-v0.2 | 140b0661-2961-46f3-8c75-cb75147e0acc | 0.0.1 | hfopenllm_v2/Skywork_Skywork-Reward-Gemma-2-27B-v0.2/1762652579.8884969 | 1762652579.8884978 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Skywork/Skywork-Reward-Gemma-2-27B-v0.2 | Skywork/Skywork-Reward-Gemma-2-27B-v0.2 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7807317916461656}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Gemma2ForSequenceClassification", "params_billions": 27.227} |
HF Open LLM v2 | google | NAPS-ai/naps-gemma-2-27b-v-0.1.0 | 8768f068-452f-4a54-bddb-9f6cffaf5a19 | 0.0.1 | hfopenllm_v2/NAPS-ai_naps-gemma-2-27b-v-0.1.0/1762652579.7653928 | 1762652579.765394 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NAPS-ai/naps-gemma-2-27b-v-0.1.0 | NAPS-ai/naps-gemma-2-27b-v-0.1.0 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH", "lower_is_be... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 27.227} |
HF Open LLM v2 | google | NAPS-ai/naps-gemma-2-27b-v0.1.0 | b004d154-392d-4f31-afbb-547b058996bd | 0.0.1 | hfopenllm_v2/NAPS-ai_naps-gemma-2-27b-v0.1.0/1762652579.765648 | 1762652579.7656488 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NAPS-ai/naps-gemma-2-27b-v0.1.0 | NAPS-ai/naps-gemma-2-27b-v0.1.0 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH", "lower_is_be... | {"precision": "float16", "architecture": "Gemma2ForCausalLM", "params_billions": 27.227} |
HF Open LLM v2 | google | nidum/Nidum-Limitless-Gemma-2B | 49e352c1-2319-4bc5-aa3f-1697739a05b8 | 0.0.1 | hfopenllm_v2/nidum_Nidum-Limitless-Gemma-2B/1762652580.406632 | 1762652580.406633 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nidum/Nidum-Limitless-Gemma-2B | nidum/Nidum-Limitless-Gemma-2B | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.24235140538216376}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GemmaForCausalLM", "params_billions": 2.506} |
HF Open LLM v2 | google | beomi/gemma-mling-7b | 2568a2b7-e95c-4224-9850-5816466b50f2 | 0.0.1 | hfopenllm_v2/beomi_gemma-mling-7b/1762652580.030431 | 1762652580.030431 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | beomi/gemma-mling-7b | beomi/gemma-mling-7b | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20290939152559653}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
HF Open LLM v2 | google | EpistemeAI2/Athene-codegemma-2-7b-it-alpaca-v1.2 | ea4bffba-6e14-4380-a060-2b4deb6d94c0 | 0.0.1 | hfopenllm_v2/EpistemeAI2_Athene-codegemma-2-7b-it-alpaca-v1.2/1762652579.609552 | 1762652579.6095529 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EpistemeAI2/Athene-codegemma-2-7b-it-alpaca-v1.2 | EpistemeAI2/Athene-codegemma-2-7b-it-alpaca-v1.2 | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4351177098986245}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | google | GenVRadmin/AryaBhatta-GemmaUltra-Merged | 4aca90c3-b0c0-4ec6-ba6b-0d5b09ef63fe | 0.0.1 | hfopenllm_v2/GenVRadmin_AryaBhatta-GemmaUltra-Merged/1762652579.627715 | 1762652579.627716 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GenVRadmin/AryaBhatta-GemmaUltra-Merged | GenVRadmin/AryaBhatta-GemmaUltra-Merged | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30207737691547315}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
HF Open LLM v2 | google | GenVRadmin/AryaBhatta-GemmaOrca-2-Merged | d4bb122a-87b4-482e-8050-7c1716a4ed5b | 0.0.1 | hfopenllm_v2/GenVRadmin_AryaBhatta-GemmaOrca-2-Merged/1762652579.627253 | 1762652579.627253 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GenVRadmin/AryaBhatta-GemmaOrca-2-Merged | GenVRadmin/AryaBhatta-GemmaOrca-2-Merged | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30637375497014585}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
HF Open LLM v2 | google | GenVRadmin/AryaBhatta-GemmaOrca-Merged | 179d4baf-7da1-4a56-82e7-35ea45204e13 | 0.0.1 | hfopenllm_v2/GenVRadmin_AryaBhatta-GemmaOrca-Merged/1762652579.627504 | 1762652579.6275048 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | GenVRadmin/AryaBhatta-GemmaOrca-Merged | GenVRadmin/AryaBhatta-GemmaOrca-Merged | google | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30637375497014585}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GemmaForCausalLM", "params_billions": 8.538} |
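Each row above carries its benchmark results as a JSON string in the `evaluation_results` column (an array of objects with `evaluation_name`, a `metric_config`, and a `score_details.score`), with model metadata similarly JSON-encoded in `additional_details`. Below is a minimal parsing sketch, assuming the field deserializes to the array shape visible in the truncated excerpts above; the `sample_record` literal is an illustrative stand-in built from the zelk12/gemma-2-S2MTM-9B row, not a verbatim record, and `scores_by_benchmark` is a hypothetical helper name.

```python
import json

# Hypothetical stand-in for one leaderboard row; field shapes are assumed
# from the (truncated) excerpts above, not from the full source dataset.
sample_record = {
    "model_id": "zelk12/gemma-2-S2MTM-9B",
    "evaluation_results": json.dumps([
        {
            "evaluation_name": "IFEval",
            "metric_config": {
                "evaluation_description": "Accuracy on IFEval",
                "lower_is_better": False,
                "score_type": "continuous",
                "min_score": 0,
                "max_score": 1,
            },
            "score_details": {"score": 0.7822555264476034},
        },
    ]),
    "additional_details": json.dumps({
        "precision": "bfloat16",
        "architecture": "Gemma2ForCausalLM",
        "params_billions": 10.159,
    }),
}

def scores_by_benchmark(record):
    """Map each evaluation_name to its scalar score for one row."""
    results = json.loads(record["evaluation_results"])
    return {r["evaluation_name"]: r["score_details"]["score"] for r in results}

print(scores_by_benchmark(sample_record))
# -> {'IFEval': 0.7822555264476034}
```

Deserializing once per row like this makes it straightforward to, say, filter the Gemma2ForCausalLM merges above by their IFEval scores, which in this section range from roughly 0.16 (likely chat-template or harness mismatches) to just over 0.80.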