| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.4-qwen2-7b | 5f54ee4a-42e8-4dd0-88bc-915d2f1971a0 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.4-qwen2-7b/1762652579.756743 | 1762652579.756744 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.4-qwen2-7b | MaziyarPanahi/calme-2.4-qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32995452067181746}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.2-qwen2-72b | 250897a9-7d48-4323-813d-fa48befe2cbe | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.2-qwen2-72b/1762652579.753872 | 1762652579.753872 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.2-qwen2-72b | MaziyarPanahi/calme-2.2-qwen2-72b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8008151704145002}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.2-qwen2.5-72b | 1fa2ab02-9a1c-4e7e-95b8-27e78af0ba73 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.2-qwen2.5-72b/1762652579.754294 | 1762652579.754294 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.2-qwen2.5-72b | MaziyarPanahi/calme-2.2-qwen2.5-72b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8476763875406145}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.7} |
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.7-qwen2-7b | f592bc27-c97c-4b14-abcf-30782d8c0056 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.7-qwen2-7b/1762652579.757804 | 1762652579.757805 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.7-qwen2-7b | MaziyarPanahi/calme-2.7-qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3592301759331906}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.3-qwen2-7b | 3272e904-21d5-4116-abde-0e74fe48b9d5 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.3-qwen2-7b/1762652579.755967 | 1762652579.755968 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.3-qwen2-7b | MaziyarPanahi/calme-2.3-qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3824862476008103}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | MaziyarPanahi/calme-2.6-qwen2-7b | 65f44cf9-f619-4f43-a03f-09e22386d319 | 0.0.1 | hfopenllm_v2/MaziyarPanahi_calme-2.6-qwen2-7b/1762652579.7575328 | 1762652579.757534 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | MaziyarPanahi/calme-2.6-qwen2-7b | MaziyarPanahi/calme-2.6-qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3442676542684522}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | PJMixers-Dev/Qwen2.5-RomboTiesTest-7B | a954be32-0c84-4ffe-9c4f-7f895c77e197 | 0.0.1 | hfopenllm_v2/PJMixers-Dev_Qwen2.5-RomboTiesTest-7B/1762652579.811478 | 1762652579.81148 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | PJMixers-Dev/Qwen2.5-RomboTiesTest-7B | PJMixers-Dev/Qwen2.5-RomboTiesTest-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7558023821238757}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.808} |
HF Open LLM v2 | alibaba | NikolaSigmoid/DeepSeek-R1-Distill-Qwen-1.5B-500 | c0182d01-454b-4194-be7a-81b9a9672d07 | 0.0.1 | hfopenllm_v2/NikolaSigmoid_DeepSeek-R1-Distill-Qwen-1.5B-500/1762652579.783665 | 1762652579.783666 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NikolaSigmoid/DeepSeek-R1-Distill-Qwen-1.5B-500 | NikolaSigmoid/DeepSeek-R1-Distill-Qwen-1.5B-500 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17485715678843247}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.157} |
HF Open LLM v2 | alibaba | win10/EVA-Norns-Qwen2.5-v0.1 | 5b8044df-ce6a-4a5e-9aed-d657188fa114 | 0.0.1 | hfopenllm_v2/win10_EVA-Norns-Qwen2.5-v0.1/1762652580.594388 | 1762652580.594388 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | win10/EVA-Norns-Qwen2.5-v0.1 | win10/EVA-Norns-Qwen2.5-v0.1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6219630580193884}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | win10/Norns-Qwen2.5-12B | 4ff2e991-ee62-467e-9fec-cdf334ca7fca | 0.0.1 | hfopenllm_v2/win10_Norns-Qwen2.5-12B/1762652580.594881 | 1762652580.594882 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | win10/Norns-Qwen2.5-12B | win10/Norns-Qwen2.5-12B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48969733640074997}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 12.277} |
HF Open LLM v2 | alibaba | win10/Norns-Qwen2.5-7B | 2451252e-2cf6-4394-9009-544630696c75 | 0.0.1 | hfopenllm_v2/win10_Norns-Qwen2.5-7B/1762652580.5950878 | 1762652580.595089 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | win10/Norns-Qwen2.5-7B | win10/Norns-Qwen2.5-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6122211288270678}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | sometimesanotion/Lamarck-14B-v0.4-Qwenvergence | 41393c10-c1e5-4ccd-bcb1-df5392cb8ec6 | 0.0.1 | hfopenllm_v2/sometimesanotion_Lamarck-14B-v0.4-Qwenvergence/1762652580.5196202 | 1762652580.5196211 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Lamarck-14B-v0.4-Qwenvergence | sometimesanotion/Lamarck-14B-v0.4-Qwenvergence | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4906470387460826}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3 | 9e453ef2-bae1-4a06-8778-d9c0dfae33e8 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso-v3/1762652580.52309 | 1762652580.52309 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3 | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7256523801291683}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Reason | 100a253a-3409-4145-8a9d-0bf821e3ce91 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-7B-Gordion-v0.1-Reason/1762652580.5243258 | 1762652580.5243268 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Reason | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Reason | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49172085621705963}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-7B-Gordion-v0.1 | 174b2a17-c4fa-4021-868b-9c23a99603c9 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-7B-Gordion-v0.1/1762652580.5239239 | 1762652580.523925 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-7B-Gordion-v0.1 | sometimesanotion/Qwen2.5-7B-Gordion-v0.1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.748183708116686}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v013 | 8127e367-fbd2-475d-a4f0-b8895dec6741 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v013/1762652580.5250719 | 1762652580.525074 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v013 | sometimesanotion/Qwentinuum-14B-v013 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6711226213114536}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v12-Prose-DS | a6c5b80d-e685-405a-8444-1be1ed763d2e | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v12-Prose-DS/1762652580.52859 | 1762652580.5285912 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v12-Prose-DS | sometimesanotion/Qwenvergence-14B-v12-Prose-DS | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6173419859306639}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v2-Prose | f639d7e3-ffb9-4dc5-ab20-993522afa5b4 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v2-Prose/1762652580.529223 | 1762652580.529224 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v2-Prose | sometimesanotion/Qwenvergence-14B-v2-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.47048830436574957}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v8 | 9332e745-f594-40a9-af22-98709efc179d | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v8/1762652580.530813 | 1762652580.530813 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v8 | sometimesanotion/Qwenvergence-14B-v8 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5913387589373973}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v7 | 6aaa1633-f780-42d4-b43e-5a4d31cf7aae | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v7/1762652580.526774 | 1762652580.526774 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v7 | sometimesanotion/Qwentinuum-14B-v7 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6109223526908603}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v11 | 50ae9dc0-efcc-43cb-8704-6dfb9270656a | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v11/1762652580.528142 | 1762652580.5281432 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v11 | sometimesanotion/Qwenvergence-14B-v11 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7192327468893647}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v9 | 65c35557-ec37-49c3-b7f6-11ce837500f0 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v9/1762652580.531015 | 1762652580.5310159 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v9 | sometimesanotion/Qwenvergence-14B-v9 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6598070896332842}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v5 | 16e0de9b-9717-4451-babc-8df8748c4efe | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v5/1762652580.5261161 | 1762652580.526117 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v5 | sometimesanotion/Qwentinuum-14B-v5 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.628557782240012}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v6-Prose | 8eecc1a5-d42e-423c-9155-daf66a414361 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v6-Prose/1762652580.52656 | 1762652580.526561 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v6-Prose | sometimesanotion/Qwentinuum-14B-v6-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5642860942299764}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v13-Prose-DS | f205507c-48ef-4a40-a0e8-39f5f7bf2cdb | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v13-Prose-DS/1762652580.528805 | 1762652580.528806 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v13-Prose-DS | sometimesanotion/Qwenvergence-14B-v13-Prose-DS | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.717808747456748}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v3 | 6cefa467-dae0-4b8b-bd5c-3343f1bfe111 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v3/1762652580.529505 | 1762652580.529512 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v3 | sometimesanotion/Qwenvergence-14B-v3 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.504410519643435}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso | b3b73406-3b25-4a23-9e13-53fafdd66552 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso/1762652580.522644 | 1762652580.522645 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso | sometimesanotion/Qwen2.5-14B-Vimarckoso | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.45742407922091166}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-qv256 | f06fc349-e84e-4ec7-a9c9-8819896c2beb | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-qv256/1762652580.52741 | 1762652580.527411 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-qv256 | sometimesanotion/Qwenvergence-14B-qv256 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7006232352380573}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v1 | c68a024d-fa21-4584-bde5-42121e919af7 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v1/1762652580.5253482 | 1762652580.5253491 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v1 | sometimesanotion/Qwentinuum-14B-v1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5031616111916382}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v3-Reason | 58ac7b57-e498-4de0-95aa-475c9c56aaf6 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v3-Reason/1762652580.530001 | 1762652580.530001 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v3-Reason | sometimesanotion/Qwenvergence-14B-v3-Reason | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5366837768232734}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v3-Reason | 50c37538-a425-4b30-a9e0-9a60f6b2492f | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v3-Reason/1762652580.530208 | 1762652580.530208 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v3-Reason | sometimesanotion/Qwenvergence-14B-v3-Reason | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5278161943642867}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock | ba7b8cb4-608a-4bf0-b107-51e721f88dee | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso-v3-model_stock/1762652580.5237172 | 1762652580.5237179 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-model_stock | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7161852772864887}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock | 86591e86-5bfb-4e8e-b910-bf6b5011562c | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v0.6-004-model_stock/1762652580.5276191 | 1762652580.52762 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock | sometimesanotion/Qwenvergence-14B-v0.6-004-model_stock | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6859854076073706}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v6 | 93e0bcb6-be72-4e9c-adbc-c8fce3240b0d | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v6/1762652580.526352 | 1762652580.526353 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v6 | sometimesanotion/Qwentinuum-14B-v6 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6304062110755019}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v6-Prose | fa88bc37-eb6b-4d69-8983-7a489ab09665 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v6-Prose/1762652580.530398 | 1762652580.530399 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v6-Prose | sometimesanotion/Qwenvergence-14B-v6-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5990073006289978}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen-2.5-14B-Virmarckeoso | dc7af75a-f45a-449a-b6ba-cc033d7de79f | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen-2.5-14B-Virmarckeoso/1762652580.5224378 | 1762652580.522439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen-2.5-14B-Virmarckeoso | sometimesanotion/Qwen-2.5-14B-Virmarckeoso | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4813295389566351}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v6-Prose-model_stock | 7f57b41f-d8e8-46a0-ad1f-2638e287bce7 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v6-Prose-model_stock/1762652580.530609 | 1762652580.5306098 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v6-Prose-model_stock | sometimesanotion/Qwenvergence-14B-v6-Prose-model_stock | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.48110458029140457}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v8 | 6be09829-08e5-4d45-a091-5451f6c74d51 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v8/1762652580.526987 | 1762652580.526987 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v8 | sometimesanotion/Qwentinuum-14B-v8 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5411552458587658}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01 | dd84656a-3b61-4241-a2eb-a5f52ff58ed2 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso-v3-Prose01/1762652580.523516 | 1762652580.523516 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01 | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-Prose01 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6872343160591674}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v15-Prose-MS | a9434630-a7cd-4dc1-b542-e76402344166 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v15-Prose-MS/1762652580.529013 | 1762652580.529014 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v15-Prose-MS | sometimesanotion/Qwenvergence-14B-v15-Prose-MS | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5032114788760489}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v2 | ce1feb87-4f78-4ff1-a548-b3409591166f | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v2/1762652580.525585 | 1762652580.525586 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v2 | sometimesanotion/Qwentinuum-14B-v2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5378329499062487}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v3-Prose | 37c4d6b3-9964-45d3-a6ed-8b84229ed304 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v3-Prose/1762652580.5297742 | 1762652580.5297751 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v3-Prose | sometimesanotion/Qwenvergence-14B-v3-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49177072390147036}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen-14B-ProseStock-v4 | e68bc90b-1274-4e28-b280-65e6ceba53f8 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen-14B-ProseStock-v4/1762652580.522184 | 1762652580.5221848 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen-14B-ProseStock-v4 | sometimesanotion/Qwen-14B-ProseStock-v4 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4942186731206532}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v12-Prose | 052e63b2-028b-4a4a-ae2b-51514e982239 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v12-Prose/1762652580.52837 | 1762652580.5283709 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v12-Prose | sometimesanotion/Qwenvergence-14B-v12-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5412051135431766}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwenvergence-14B-v10 | f2b35397-f539-4129-8e1f-f9dae9c9431b | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwenvergence-14B-v10/1762652580.5278451 | 1762652580.5278451 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwenvergence-14B-v10 | sometimesanotion/Qwenvergence-14B-v10 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6756938257157675}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-IF-Variant | 9df5ab5a-16cf-478f-87f0-1b8717e1e330 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso-v3-IF-Variant/1762652580.523307 | 1762652580.523308 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-IF-Variant | sometimesanotion/Qwen2.5-14B-Vimarckoso-v3-IF-Variant | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6412973133507981}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v3 | 96b75db5-4e23-4179-bbf7-801f35d31af7 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v3/1762652580.525815 | 1762652580.525816 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v3 | sometimesanotion/Qwentinuum-14B-v3 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6157683834448153}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentessential-14B-v1 | 3cce1e77-5dfc-44d2-b0c2-f7220d989e9d | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentessential-14B-v1/1762652580.524672 | 1762652580.524674 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentessential-14B-v1 | sometimesanotion/Qwentessential-14B-v1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6279083941719084}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwentinuum-14B-v9 | cea3e14d-a43d-4e32-b8fc-d8ae995190d8 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwentinuum-14B-v9/1762652580.5271978 | 1762652580.527199 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwentinuum-14B-v9 | sometimesanotion/Qwentinuum-14B-v9 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5107304175144174}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-14B-Vimarckoso-v2 | 5242491e-deb4-41ae-8d70-5b0d8ffb7bc7 | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-14B-Vimarckoso-v2/1762652580.52286 | 1762652580.522861 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-14B-Vimarckoso-v2 | sometimesanotion/Qwen2.5-14B-Vimarckoso-v2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4505301488938239}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.0} |
HF Open LLM v2 | alibaba | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Prose | dceb35c6-30bb-483c-aa62-8273b409311b | 0.0.1 | hfopenllm_v2/sometimesanotion_Qwen2.5-7B-Gordion-v0.1-Prose/1762652580.524123 | 1762652580.524123 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Prose | sometimesanotion/Qwen2.5-7B-Gordion-v0.1-Prose | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5347101246913745}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | alibaba | v000000/Qwen2.5-14B-Gutenberg-1e-Delta | 676745af-1929-4875-9a78-d57354883d75 | 0.0.1 | hfopenllm_v2/v000000_Qwen2.5-14B-Gutenberg-1e-Delta/1762652580.584905 | 1762652580.584906 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | v000000/Qwen2.5-14B-Gutenberg-1e-Delta | v000000/Qwen2.5-14B-Gutenberg-1e-Delta | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8045120280854798}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | v000000/Qwen2.5-Lumen-14B | 7b134cb3-7794-4984-9240-b889e2a3b6b4 | 0.0.1 | hfopenllm_v2/v000000_Qwen2.5-Lumen-14B/1762652580.585356 | 1762652580.585357 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | v000000/Qwen2.5-Lumen-14B | v000000/Qwen2.5-Lumen-14B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8063604569209697}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | EVA-UNIT-01/EVA-Qwen2.5-72B-v0.2 | 9e315ba7-3eea-4934-822e-461e64bf8551 | 0.0.1 | hfopenllm_v2/EVA-UNIT-01_EVA-Qwen2.5-72B-v0.2/1762652579.59233 | 1762652579.592331 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EVA-UNIT-01/EVA-Qwen2.5-72B-v0.2 | EVA-UNIT-01/EVA-Qwen2.5-72B-v0.2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6878837041272712}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | alibaba | EVA-UNIT-01/EVA-Qwen2.5-14B-v0.2 | 3ba36700-5019-4525-bf5e-6a87cce7ecc5 | 0.0.1 | hfopenllm_v2/EVA-UNIT-01_EVA-Qwen2.5-14B-v0.2/1762652579.5920892 | 1762652579.5920892 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EVA-UNIT-01/EVA-Qwen2.5-14B-v0.2 | EVA-UNIT-01/EVA-Qwen2.5-14B-v0.2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4038429145777648}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | Aashraf995/Qwen-Evo-7B | 705ae322-fed9-4a98-a79e-e0b289065ba9 | 0.0.1 | hfopenllm_v2/Aashraf995_Qwen-Evo-7B/1762652579.4765608 | 1762652579.476562 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Aashraf995/Qwen-Evo-7B | Aashraf995/Qwen-Evo-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4757343847657549}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | Aashraf995/QwenStock-14B | 7888b813-8ef1-4367-8168-edd1bd3c7888 | 0.0.1 | hfopenllm_v2/Aashraf995_QwenStock-14B/1762652579.476816 | 1762652579.476817 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Aashraf995/QwenStock-14B | Aashraf995/QwenStock-14B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5008632650256873}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | TIGER-Lab/Qwen2.5-Math-7B-CFT | 07e72fc4-9c37-4a81-a788-8619035c66d3 | 0.0.1 | hfopenllm_v2/TIGER-Lab_Qwen2.5-Math-7B-CFT/1762652579.911227 | 1762652579.911228 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TIGER-Lab/Qwen2.5-Math-7B-CFT | TIGER-Lab/Qwen2.5-Math-7B-CFT | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2776976200924658}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Ins-Rule | f75e2bca-e300-4b3c-a5aa-f6aae03e7330 | 0.0.1 | hfopenllm_v2/TIGER-Lab_AceCoder-Qwen2.5-Coder-7B-Ins-Rule/1762652579.910825 | 1762652579.910826 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Ins-Rule | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Ins-Rule | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6222378843690297}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | TIGER-Lab/AceCoder-Qwen2.5-7B-Ins-Rule | 7621e05b-1b5e-43e5-a65c-322334575e68 | 0.0.1 | hfopenllm_v2/TIGER-Lab_AceCoder-Qwen2.5-7B-Ins-Rule/1762652579.910362 | 1762652579.910363 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TIGER-Lab/AceCoder-Qwen2.5-7B-Ins-Rule | TIGER-Lab/AceCoder-Qwen2.5-7B-Ins-Rule | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.742413462944986}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Base-Rule | f6223009-028e-4063-90ce-e008a3b5b284 | 0.0.1 | hfopenllm_v2/TIGER-Lab_AceCoder-Qwen2.5-Coder-7B-Base-Rule/1762652579.910613 | 1762652579.910613 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Base-Rule | TIGER-Lab/AceCoder-Qwen2.5-Coder-7B-Base-Rule | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44076273177391545}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | Rombo-Org/Rombo-LLM-V2.5-Qwen-7b | 8713e6fb-8843-43f2-af3b-57a59d326670 | 0.0.1 | hfopenllm_v2/Rombo-Org_Rombo-LLM-V2.5-Qwen-7b/1762652579.854495 | 1762652579.854495 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Rombo-Org/Rombo-LLM-V2.5-Qwen-7b | Rombo-Org/Rombo-LLM-V2.5-Qwen-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.748183708116686}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | huihui-ai/DeepSeek-R1-Distill-Qwen-14B-abliterated-v2 | 69d04754-3779-4408-9aa9-68c9ba65de7a | 0.0.1 | hfopenllm_v2/huihui-ai_DeepSeek-R1-Distill-Qwen-14B-abliterated-v2/1762652580.200386 | 1762652580.200386 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | huihui-ai/DeepSeek-R1-Distill-Qwen-14B-abliterated-v2 | huihui-ai/DeepSeek-R1-Distill-Qwen-14B-abliterated-v2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.42112927033604175}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | kms7530/chemeng_qwen-math-7b_24_1_100_1 | af7f201f-3af3-4ffb-9416-c83235851cb6 | 0.0.1 | hfopenllm_v2/kms7530_chemeng_qwen-math-7b_24_1_100_1/1762652580.310198 | 1762652580.310199 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kms7530/chemeng_qwen-math-7b_24_1_100_1 | kms7530/chemeng_qwen-math-7b_24_1_100_1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.211052230304481}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "?", "params_billions": 8.911} |
HF Open LLM v2 | alibaba | kms7530/chemeng_qwen-math-7b_24_1_100_1_nonmath | 8ae7c857-be7e-463e-86c2-6b165920a45c | 0.0.1 | hfopenllm_v2/kms7530_chemeng_qwen-math-7b_24_1_100_1_nonmath/1762652580.310462 | 1762652580.310463 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | kms7530/chemeng_qwen-math-7b_24_1_100_1_nonmath | kms7530/chemeng_qwen-math-7b_24_1_100_1_nonmath | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.25836336476105626}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "?", "params_billions": 15.231} |
HF Open LLM v2 | alibaba | Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial | f1b6c510-02fe-4ffd-96da-4cfcfb04eb8c | 0.0.1 | hfopenllm_v2/Marsouuu_MiniQwenMathExpert-ECE-PRYMMAL-Martial/1762652579.747411 | 1762652579.747412 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial | Marsouuu/MiniQwenMathExpert-ECE-PRYMMAL-Martial | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2794961812435449}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | alibaba | Replete-AI/Replete-Coder-Qwen2-1.5b | 1ff6b76b-7241-4f06-9db5-4594d3ff7a3f | 0.0.1 | hfopenllm_v2/Replete-AI_Replete-Coder-Qwen2-1.5b/1762652579.852138 | 1762652579.852139 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Replete-AI/Replete-Coder-Qwen2-1.5b | Replete-AI/Replete-Coder-Qwen2-1.5b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.30142798884736943}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | alibaba | Replete-AI/Replete-LLM-Qwen2-7b | 20a6e090-2c78-4eb9-870e-9abbcbada6f9 | 0.0.1 | hfopenllm_v2/Replete-AI_Replete-LLM-Qwen2-7b/1762652579.852611 | 1762652579.852612 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Replete-AI/Replete-LLM-Qwen2-7b | Replete-AI/Replete-LLM-Qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.09324813716494457}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | Replete-AI/Replete-LLM-Qwen2-7b | a846978d-de78-48e8-a738-54c732e50c28 | 0.0.1 | hfopenllm_v2/Replete-AI_Replete-LLM-Qwen2-7b/1762652579.8524 | 1762652579.8524008 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Replete-AI/Replete-LLM-Qwen2-7b | Replete-AI/Replete-LLM-Qwen2-7b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.09047549391170981}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | Replete-AI/Replete-LLM-Qwen2-7b_Beta-Preview | 4977e0d5-1446-41ba-b00b-e8236c896d2e | 0.0.1 | hfopenllm_v2/Replete-AI_Replete-LLM-Qwen2-7b_Beta-Preview/1762652579.852791 | 1762652579.852791 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Replete-AI/Replete-LLM-Qwen2-7b_Beta-Preview | Replete-AI/Replete-LLM-Qwen2-7b_Beta-Preview | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.08575468645416384}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | Nexesenex/Qwen_2.5_3b_Smarteaz_0.01a | eaf601d2-f285-4b0c-b3ab-5d029b8fe20f | 0.0.1 | hfopenllm_v2/Nexesenex_Qwen_2.5_3b_Smarteaz_0.01a/1762652579.782197 | 1762652579.782198 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Nexesenex/Qwen_2.5_3b_Smarteaz_0.01a | Nexesenex/Qwen_2.5_3b_Smarteaz_0.01a | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4011954946209391}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 3.085} |
HF Open LLM v2 | alibaba | 1-800-LLMs/Qwen-2.5-14B-Hindi | 21ba6052-9614-454e-999d-ef4f0f693c6c | 0.0.1 | hfopenllm_v2/1-800-LLMs_Qwen-2.5-14B-Hindi/1762652579.467683 | 1762652579.4676852 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | 1-800-LLMs/Qwen-2.5-14B-Hindi | 1-800-LLMs/Qwen-2.5-14B-Hindi | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.582570911847232}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | nisten/franqwenstein-35b | 3e3344d2-6911-4d5f-85d6-6593cbed3b49 | 0.0.1 | hfopenllm_v2/nisten_franqwenstein-35b/1762652580.407119 | 1762652580.40712 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nisten/franqwenstein-35b | nisten/franqwenstein-35b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.39135383005979685}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 34.714} |
HF Open LLM v2 | alibaba | nisten/franqwenstein-35b | ff90ed4a-6dcf-4b9b-9d3a-19f933e2c0c8 | 0.0.1 | hfopenllm_v2/nisten_franqwenstein-35b/1762652580.406877 | 1762652580.406878 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nisten/franqwenstein-35b | nisten/franqwenstein-35b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37986320740080765}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 34.714} |
HF Open LLM v2 | alibaba | nisten/tqwendo-36b | 3a5b1794-12f1-4004-bdb2-309cc950c757 | 0.0.1 | hfopenllm_v2/nisten_tqwendo-36b/1762652580.40731 | 1762652580.4073112 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | nisten/tqwendo-36b | nisten/tqwendo-36b | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6777672132164878}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 35.69} |
HF Open LLM v2 | alibaba | deepseek-ai/DeepSeek-R1-Distill-Qwen-7B | 4cb8eae2-bc55-4adb-a4eb-1fc9eb29d891 | 0.0.1 | hfopenllm_v2/deepseek-ai_DeepSeek-R1-Distill-Qwen-7B/1762652580.1228092 | 1762652580.1228101 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | deepseek-ai/DeepSeek-R1-Distill-Qwen-7B | deepseek-ai/DeepSeek-R1-Distill-Qwen-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.40376866713653103}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B | d38f0e3a-e89e-4af6-95b2-8230b6a84ec3 | 0.0.1 | hfopenllm_v2/deepseek-ai_DeepSeek-R1-Distill-Qwen-1.5B/1762652580.121964 | 1762652580.1219652 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B | deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.34634104176917246}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | alibaba | deepseek-ai/DeepSeek-R1-Distill-Qwen-32B | 6731c6b8-0b23-4fc2-b284-01025ce30887 | 0.0.1 | hfopenllm_v2/deepseek-ai_DeepSeek-R1-Distill-Qwen-32B/1762652580.12255 | 1762652580.1225522 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | deepseek-ai/DeepSeek-R1-Distill-Qwen-32B | deepseek-ai/DeepSeek-R1-Distill-Qwen-32B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4186314534324481}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 32.764} |
HF Open LLM v2 | alibaba | deepseek-ai/DeepSeek-R1-Distill-Qwen-14B | 77e70ef3-fef2-4b75-9221-b165ec29f31e | 0.0.1 | hfopenllm_v2/deepseek-ai_DeepSeek-R1-Distill-Qwen-14B/1762652580.122241 | 1762652580.122248 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | deepseek-ai/DeepSeek-R1-Distill-Qwen-14B | deepseek-ai/DeepSeek-R1-Distill-Qwen-14B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43816517950150047}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | newsbang/Homer-v0.4-Qwen2.5-7B | 9a022bdc-d1b8-4f2e-a1af-6cd3bad6bded | 0.0.1 | hfopenllm_v2/newsbang_Homer-v0.4-Qwen2.5-7B/1762652580.403887 | 1762652580.4038882 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | newsbang/Homer-v0.4-Qwen2.5-7B | newsbang/Homer-v0.4-Qwen2.5-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.799940823681166}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | newsbang/Homer-v0.3-Qwen2.5-7B | 0bc5145c-90d0-4a8b-89c6-0b03aa9d0ee1 | 0.0.1 | hfopenllm_v2/newsbang_Homer-v0.3-Qwen2.5-7B/1762652580.4035761 | 1762652580.403577 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | newsbang/Homer-v0.3-Qwen2.5-7B | newsbang/Homer-v0.3-Qwen2.5-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5154013572875525}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | newsbang/Homer-v0.5-Qwen2.5-7B | dc22ad83-0752-4f5e-97ac-733ef6c6cf53 | 0.0.1 | hfopenllm_v2/newsbang_Homer-v0.5-Qwen2.5-7B/1762652580.404095 | 1762652580.404096 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | newsbang/Homer-v0.5-Qwen2.5-7B | newsbang/Homer-v0.5-Qwen2.5-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7880756393037142}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | newsbang/Homer-v1.0-Qwen2.5-7B | 1fe21571-0375-43c3-8071-1aaaf0223baa | 0.0.1 | hfopenllm_v2/newsbang_Homer-v1.0-Qwen2.5-7B/1762652580.404567 | 1762652580.404568 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | newsbang/Homer-v1.0-Qwen2.5-7B | newsbang/Homer-v1.0-Qwen2.5-7B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6392737935344885}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | newsbang/Homer-v1.0-Qwen2.5-72B | 3ebdda73-1c41-4a98-b3cf-ac5d482c8b5c | 0.0.1 | hfopenllm_v2/newsbang_Homer-v1.0-Qwen2.5-72B/1762652580.404309 | 1762652580.40431 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | newsbang/Homer-v1.0-Qwen2.5-72B | newsbang/Homer-v1.0-Qwen2.5-72B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7627716680629618}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-v0.2 | eed9909e-db3e-4d6a-8caa-3f208ace941d | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-v0.2/1762652579.917543 | 1762652579.917544 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-v0.2 | TheTsar1209/qwen-carpmuscle-v0.2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5256929391791557}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-v0.3 | f8aa8470-6803-458e-8207-b217969dd6f3 | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-v0.3/1762652579.917758 | 1762652579.917759 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-v0.3 | TheTsar1209/qwen-carpmuscle-v0.3 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4476322823441801}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-r-v0.3 | 43b106fe-ff02-4cfe-956f-cfc9e272de78 | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-r-v0.3/1762652579.917092 | 1762652579.917093 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-r-v0.3 | TheTsar1209/qwen-carpmuscle-r-v0.3 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44550902715904905}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-v0.1 | ce9658b7-b457-4fb3-8fce-4173b5d93f2d | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-v0.1/1762652579.917331 | 1762652579.917332 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-v0.1 | TheTsar1209/qwen-carpmuscle-v0.1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5621628390448454}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-v0.4.1 | c464e6b4-aa76-4b42-ab9b-71f193ec2a57 | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-v0.4.1/1762652579.918201 | 1762652579.9182022 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-v0.4.1 | TheTsar1209/qwen-carpmuscle-v0.4.1 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7359938297051822}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | TheTsar1209/qwen-carpmuscle-v0.4 | 90fe60dc-76dd-4e90-99b4-c16d026afcb5 | 0.0.1 | hfopenllm_v2/TheTsar1209_qwen-carpmuscle-v0.4/1762652579.917984 | 1762652579.917985 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | TheTsar1209/qwen-carpmuscle-v0.4 | TheTsar1209/qwen-carpmuscle-v0.4 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7202068289915202}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | alibaba | freewheelin/free-evo-qwen72b-v0.8-re | cfb071af-7283-4155-8ce1-40f751dd46ec | 0.0.1 | hfopenllm_v2/freewheelin_free-evo-qwen72b-v0.8-re/1762652580.161332 | 1762652580.161333 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | freewheelin/free-evo-qwen72b-v0.8-re | freewheelin/free-evo-qwen72b-v0.8-re | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.533086654521115}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 72.288} |
HF Open LLM v2 | alibaba | minghaowu/Qwen1.5-1.8B-OpenHermes-2.5 | cf3f376a-92ec-4678-a57a-cee2e40032a5 | 0.0.1 | hfopenllm_v2/minghaowu_Qwen1.5-1.8B-OpenHermes-2.5/1762652580.360414 | 1762652580.360415 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | minghaowu/Qwen1.5-1.8B-OpenHermes-2.5 | minghaowu/Qwen1.5-1.8B-OpenHermes-2.5 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.27779735546128714}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.837} |
HF Open LLM v2 | alibaba | mergekit-community/SuperQwen-2.5-1.5B | 95d33475-a71b-41d6-a08d-3da30e631897 | 0.0.1 | hfopenllm_v2/mergekit-community_SuperQwen-2.5-1.5B/1762652580.346312 | 1762652580.346313 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | mergekit-community/SuperQwen-2.5-1.5B | mergekit-community/SuperQwen-2.5-1.5B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1336409615376091}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.777} |
HF Open LLM v2 | alibaba | migtissera/Tess-v2.5.2-Qwen2-72B | 34b9dd9e-dc03-4354-b016-3b1463a902f9 | 0.0.1 | hfopenllm_v2/migtissera_Tess-v2.5.2-Qwen2-72B/1762652580.359263 | 1762652580.359264 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | migtissera/Tess-v2.5.2-Qwen2-72B | migtissera/Tess-v2.5.2-Qwen2-72B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.44943084349525925}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.0} |
HF Open LLM v2 | alibaba | Alsebay/Qwen2.5-7B-test-novelist | 19ff3120-2171-48b3-8db6-1c76bb57cf47 | 0.0.1 | hfopenllm_v2/Alsebay_Qwen2.5-7B-test-novelist/1762652579.479883 | 1762652579.4798841 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Alsebay/Qwen2.5-7B-test-novelist | Alsebay/Qwen2.5-7B-test-novelist | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5351600420218354}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | securin/Securin-LLM-V2.5-Qwen-1.5B | cbd0163f-fbea-4f40-a26b-a0508ec02061 | 0.0.1 | hfopenllm_v2/securin_Securin-LLM-V2.5-Qwen-1.5B/1762652580.510926 | 1762652580.5109272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | securin/Securin-LLM-V2.5-Qwen-1.5B | securin/Securin-LLM-V2.5-Qwen-1.5B | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1492030035860406}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.543} |
HF Open LLM v2 | alibaba | Kukedlc/Qwen-2.5-7b-Spanish-o1-CoT | c9a159fb-9e6b-49b3-8f2b-a2d2d3ca8f19 | 0.0.1 | hfopenllm_v2/Kukedlc_Qwen-2.5-7b-Spanish-o1-CoT/1762652579.703295 | 1762652579.703295 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Kukedlc/Qwen-2.5-7b-Spanish-o1-CoT | Kukedlc/Qwen-2.5-7b-Spanish-o1-CoT | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4210295349672203}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | alibaba | marcuscedricridia/Hush-Qwen2.5-7B-v1.4 | fd65e319-bc38-457b-9913-9a2214e69823 | 0.0.1 | hfopenllm_v2/marcuscedricridia_Hush-Qwen2.5-7B-v1.4/1762652580.334734 | 1762652580.3347352 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | marcuscedricridia/Hush-Qwen2.5-7B-v1.4 | marcuscedricridia/Hush-Qwen2.5-7B-v1.4 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7834545672149895}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | alibaba | marcuscedricridia/Hush-Qwen2.5-7B-v1.2 | 6e342711-8d2d-42ed-a019-11be429e10d8 | 0.0.1 | hfopenllm_v2/marcuscedricridia_Hush-Qwen2.5-7B-v1.2/1762652580.334213 | 1762652580.334214 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | marcuscedricridia/Hush-Qwen2.5-7B-v1.2 | marcuscedricridia/Hush-Qwen2.5-7B-v1.2 | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7865020368178655}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
HF Open LLM v2 | alibaba | marcuscedricridia/Yell-Qwen2.5-7B-Preview | f47334f2-f0ab-48f5-814e-f3ede36802d9 | 0.0.1 | hfopenllm_v2/marcuscedricridia_Yell-Qwen2.5-7B-Preview/1762652580.335188 | 1762652580.335188 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | marcuscedricridia/Yell-Qwen2.5-7B-Preview | marcuscedricridia/Yell-Qwen2.5-7B-Preview | alibaba | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5838696879834395}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.613} |
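The evaluation_results column in each row above holds a JSON list of per-benchmark records (evaluation_name, metric_config, score_details), shown truncated in this preview. Below is a minimal sketch of how one such record might be parsed, assuming the full JSON is available; the field names follow the fragments visible in the rows, and the IFEval score is copied from one of them as sample data, so this is an illustration rather than an official consumer of the dataset.

```python
import json

# One complete record in the shape shown (truncated) in the
# evaluation_results column; the IFEval score is taken from a row above.
raw = """
[{"evaluation_name": "IFEval",
  "metric_config": {"evaluation_description": "Accuracy on IFEval",
                    "lower_is_better": false,
                    "score_type": "continuous",
                    "min_score": 0, "max_score": 1},
  "score_details": {"score": 0.742413462944986}}]
"""

for result in json.loads(raw):
    cfg = result["metric_config"]
    score = result["score_details"]["score"]
    # Normalize so that higher is always better, using the declared bounds.
    if cfg["lower_is_better"]:
        score = cfg["max_score"] - (score - cfg["min_score"])
    print(f'{result["evaluation_name"]}: {score:.4f}')
```

Under the same assumptions, the untruncated records could presumably be fetched with a plain HTTP GET from the source_data endpoint listed in each row.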