| _leaderboard | _developer | _model | _uuid | schema_version | evaluation_id | retrieved_timestamp | source_data | evaluation_source_name | evaluation_source_type | source_organization_name | source_organization_url | source_organization_logo_url | evaluator_relationship | model_name | model_id | model_developer | model_inference_platform | evaluation_results | additional_details |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
HF Open LLM v2 | jiviai | jiviai/medX_v2 | 386bc585-73ed-443e-b8ce-8723c533e41b | 0.0.1 | hfopenllm_v2/jiviai_medX_v2/1762652580.288615 | 1762652580.288616 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | jiviai/medX_v2 | jiviai/medX_v2 | jiviai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37431792089433813}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-Chat-3B-v1 | 9a0e6d99-4f86-4ce8-9b5a-f7b6c0fbd710 | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-Chat-3B-v1/1762652580.5763452 | 1762652580.5763478 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-Chat-3B-v1 | togethercomputer/RedPajama-INCITE-Chat-3B-v1 | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16521496296493304}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/GPT-NeoXT-Chat-Base-20B | 3b5ca740-a1e5-4043-ad56-c772bbdd1b38 | 0.0.1 | hfopenllm_v2/togethercomputer_GPT-NeoXT-Chat-Base-20B/1762652580.574344 | 1762652580.5743449 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/GPT-NeoXT-Chat-Base-20B | togethercomputer/GPT-NeoXT-Chat-Base-20B | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18297561581049393}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 20.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/Llama-2-7B-32K-Instruct | a1609dba-826b-4246-9230-35bd68268fe4 | 0.0.1 | hfopenllm_v2/togethercomputer_Llama-2-7B-32K-Instruct/1762652580.574983 | 1762652580.5749838 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/Llama-2-7B-32K-Instruct | togethercomputer/Llama-2-7B-32K-Instruct | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2130003945087922}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-Instruct-3B-v1 | e78a3888-33c7-4264-a01e-b0661504322f | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-Instruct-3B-v1/1762652580.576687 | 1762652580.576688 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-Instruct-3B-v1 | togethercomputer/RedPajama-INCITE-Instruct-3B-v1 | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2124263620526869}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-7B-Base | 8d69f711-74c9-4c1e-87dc-9b46f70674bb | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-7B-Base/1762652580.5751948 | 1762652580.5751958 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-7B-Base | togethercomputer/RedPajama-INCITE-7B-Base | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20822971936683554}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-Base-3B-v1 | ba5c73b3-4785-44ef-8bfb-cfbbbdc16a91 | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-Base-3B-v1/1762652580.575899 | 1762652580.5758998 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-Base-3B-v1 | togethercomputer/RedPajama-INCITE-Base-3B-v1 | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22936253584932426}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 3.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-7B-Chat | c3b6efec-5428-499f-8e6b-e3b2b87a0d15 | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-7B-Chat/1762652580.57541 | 1762652580.5754108 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-7B-Chat | togethercomputer/RedPajama-INCITE-7B-Chat | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1557977278066641}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | togethercomputer | togethercomputer/RedPajama-INCITE-7B-Instruct | d8cef007-51ab-4793-9a74-d9f29d6c0f27 | 0.0.1 | hfopenllm_v2/togethercomputer_RedPajama-INCITE-7B-Instruct/1762652580.57568 | 1762652580.575681 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/RedPajama-INCITE-7B-Instruct | togethercomputer/RedPajama-INCITE-7B-Instruct | togethercomputer | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2055069437980115}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 7.0} |
HF Open LLM v2 | elinas | elinas/Chronos-Gold-12B-1.0 | 4705d82c-514c-48a1-8f87-4d2b8f9aff6b | 0.0.1 | hfopenllm_v2/elinas_Chronos-Gold-12B-1.0/1762652580.1470149 | 1762652580.147016 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | elinas/Chronos-Gold-12B-1.0 | elinas/Chronos-Gold-12B-1.0 | elinas | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3165656014929277}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | NYTK | NYTK/PULI-LlumiX-32K | 7230c1f3-d7f6-4a96-8308-b2d5895a0a0a | 0.0.1 | hfopenllm_v2/NYTK_PULI-LlumiX-32K/1762652579.76952 | 1762652579.769521 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NYTK/PULI-LlumiX-32K | NYTK/PULI-LlumiX-32K | NYTK | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1699612583500667}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | OliveiraJLT | OliveiraJLT/Sagui-7B-Instruct-v0.1 | d5135349-0757-469d-8ad3-80ef56d1f7de | 0.0.1 | hfopenllm_v2/OliveiraJLT_Sagui-7B-Instruct-v0.1/1762652579.798249 | 1762652579.798249 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | OliveiraJLT/Sagui-7B-Instruct-v0.1 | OliveiraJLT/Sagui-7B-Instruct-v0.1 | OliveiraJLT | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.28916275482386733}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 6.738} |
HF Open LLM v2 | Etherll | Etherll/Qwen2.5-Coder-7B-Instruct-Ties | ea9f32e5-431d-4573-9ac9-25ebfa9c2c9e | 0.0.1 | hfopenllm_v2/Etherll_Qwen2.5-Coder-7B-Instruct-Ties/1762652579.61485 | 1762652579.614851 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Qwen2.5-Coder-7B-Instruct-Ties | Etherll/Qwen2.5-Coder-7B-Instruct-Ties | Etherll | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5005385709916355}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Etherll | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties-v2 | 80ff60c0-820c-425d-8b32-44fc61128c9f | 0.0.1 | hfopenllm_v2/Etherll_Chocolatine-3B-Instruct-DPO-Revised-Ties-v2/1762652579.613742 | 1762652579.613743 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties-v2 | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties-v2 | Etherll | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37399322686028624}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | Etherll | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties | d3b94b8e-8612-4928-bdba-81226af143b2 | 0.0.1 | hfopenllm_v2/Etherll_Chocolatine-3B-Instruct-DPO-Revised-Ties/1762652579.613493 | 1762652579.613494 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties | Etherll/Chocolatine-3B-Instruct-DPO-Revised-Ties | Etherll | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3724694920588483}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Phi3ForCausalLM", "params_billions": 3.821} |
HF Open LLM v2 | Etherll | Etherll/SuperHermes | a641d61c-aa42-4bce-afc0-ba7639f0a24e | 0.0.1 | hfopenllm_v2/Etherll_SuperHermes/1762652579.615286 | 1762652579.615287 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Etherll/SuperHermes | Etherll/SuperHermes | Etherll | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5459015412438996}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | openai | AI-Sweden-Models/gpt-sw3-40b | e791a3d6-928e-43c9-96ee-156901e8b18b | 0.0.1 | hfopenllm_v2/AI-Sweden-Models_gpt-sw3-40b/1762652579.475041 | 1762652579.475042 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | AI-Sweden-Models/gpt-sw3-40b | AI-Sweden-Models/gpt-sw3-40b | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1470298807164989}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "GPT2LMHeadModel", "params_billions": 39.927} |
HF Open LLM v2 | openai | sumink/ftgpt | ba4e0ed2-201a-4007-afbe-65e8276d853c | 0.0.1 | hfopenllm_v2/sumink_ftgpt/1762652580.5475452 | 1762652580.5475461 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | sumink/ftgpt | sumink/ftgpt | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.0787100449030794}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.124} |
HF Open LLM v2 | openai | Sharathhebbar24/chat_gpt2_dpo | ce90bca7-f999-44ef-9b72-1fdb4ac68eb0 | 0.0.1 | hfopenllm_v2/Sharathhebbar24_chat_gpt2_dpo/1762652579.8799832 | 1762652579.8799841 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Sharathhebbar24/chat_gpt2_dpo | Sharathhebbar24/chat_gpt2_dpo | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.09861944086135896}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPT2LMHeadModel", "params_billions": 0.124} |
HF Open LLM v2 | openai | openai/gpt2 | 43c1b559-e9e8-477e-95d9-1c28ac5d265c | 0.0.1 | hfopenllm_v2/gpt2/1762652580.1809301 | 1762652580.180931 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | gpt2 | openai/gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1934168007553292}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.137} |
HF Open LLM v2 | openai | openai/gpt2 | e28a8f11-68f6-464f-b1b8-21938cb41aa3 | 0.0.1 | hfopenllm_v2/gpt2/1762652580.181142 | 1762652580.181143 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | gpt2 | openai/gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.08333333333333333}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPT2LMHeadModel", "params_billions": 0.137} |
HF Open LLM v2 | openai | openai-community/gpt2 | a18409fa-1372-401e-8ae5-f25eaa6386d2 | 0.0.1 | hfopenllm_v2/openai-community_gpt2/1762652580.42929 | 1762652580.429291 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | openai-community/gpt2 | openai-community/gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17925327021192655}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.137} |
HF Open LLM v2 | openai | openai-community/gpt2 | 435a8268-cf26-4c78-8789-758dd32759b1 | 0.0.1 | hfopenllm_v2/openai-community_gpt2/1762652580.429537 | 1762652580.429537 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | openai-community/gpt2 | openai-community/gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.17795449407571912}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPT2LMHeadModel", "params_billions": 0.137} |
HF Open LLM v2 | openai | openai-community/gpt2-medium | f68c55dc-0d74-4c75-ac57-62f23cce01b5 | 0.0.1 | hfopenllm_v2/openai-community_gpt2-medium/1762652580.4299362 | 1762652580.429937 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | openai-community/gpt2-medium | openai-community/gpt2-medium | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.22084402718121252}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.38} |
HF Open LLM v2 | openai | openai-community/gpt2-xl | 39a68088-0a01-482d-81b3-c6a84d98d0ca | 0.0.1 | hfopenllm_v2/openai-community_gpt2-xl/1762652580.430138 | 1762652580.430138 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | openai-community/gpt2-xl | openai-community/gpt2-xl | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20385798570016445}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 1.608} |
HF Open LLM v2 | openai | openai-community/gpt2-large | 15499118-2a47-4a6f-8c86-158a87a9350f | 0.0.1 | hfopenllm_v2/openai-community_gpt2-large/1762652580.4297202 | 1762652580.429721 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | openai-community/gpt2-large | openai-community/gpt2-large | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20478220011790937}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.812} |
HF Open LLM v2 | openai | togethercomputer/GPT-JT-6B-v1 | 03196258-8cc8-4c57-badf-9085ede8d658 | 0.0.1 | hfopenllm_v2/togethercomputer_GPT-JT-6B-v1/1762652580.574097 | 1762652580.5740979 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | togethercomputer/GPT-JT-6B-v1 | togethercomputer/GPT-JT-6B-v1 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20610646418170453}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTJForCausalLM", "params_billions": 6.0} |
HF Open LLM v2 | openai | NYTK/PULI-GPTrio | 685fc779-4f8b-4110-82da-5a49697153a0 | 0.0.1 | hfopenllm_v2/NYTK_PULI-GPTrio/1762652579.769266 | 1762652579.769266 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NYTK/PULI-GPTrio | NYTK/PULI-GPTrio | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.21797164855915638}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 7.673} |
HF Open LLM v2 | openai | distilbert/distilgpt2 | a21cd9f0-6006-4587-bcd1-f1d42dfce7ba | 0.0.1 | hfopenllm_v2/distilbert_distilgpt2/1762652580.1266282 | 1762652580.126629 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | distilbert/distilgpt2 | distilbert/distilgpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.06110010328151527}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.088} |
HF Open LLM v2 | openai | postbot/gpt2-medium-emailgen | a661e335-7ed5-43b9-aa3b-1e027cebdb75 | 0.0.1 | hfopenllm_v2/postbot_gpt2-medium-emailgen/1762652580.4421701 | 1762652580.4421709 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | postbot/gpt2-medium-emailgen | postbot/gpt2-medium-emailgen | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1492030035860406}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.38} |
HF Open LLM v2 | openai | meraGPT/mera-mix-4x7B | 152e8d2f-8470-45b2-8318-9b6c44438978 | 0.0.1 | hfopenllm_v2/meraGPT_mera-mix-4x7B/1762652580.345789 | 1762652580.34579 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | meraGPT/mera-mix-4x7B | meraGPT/mera-mix-4x7B | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4831779677921249}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 24.154} |
HF Open LLM v2 | openai | DeepAutoAI/d2nwg_causal_gpt2_v1 | f822093a-2bdc-4284-8af2-8048d09afeb2 | 0.0.1 | hfopenllm_v2/DeepAutoAI_d2nwg_causal_gpt2_v1/1762652579.549553 | 1762652579.5495539 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/d2nwg_causal_gpt2_v1 | DeepAutoAI/d2nwg_causal_gpt2_v1 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1988623518929773}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.124} |
HF Open LLM v2 | openai | DeepAutoAI/d2nwg_causal_gpt2 | 6b5b21c7-9284-4117-a63c-65628604e1a5 | 0.0.1 | hfopenllm_v2/DeepAutoAI_d2nwg_causal_gpt2/1762652579.549271 | 1762652579.549272 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/d2nwg_causal_gpt2 | DeepAutoAI/d2nwg_causal_gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19161823960425006}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.124} |
HF Open LLM v2 | openai | DeepAutoAI/causal_gpt2 | bf683545-a6df-4deb-9a91-ea6b8eae8be7 | 0.0.1 | hfopenllm_v2/DeepAutoAI_causal_gpt2/1762652579.548641 | 1762652579.5486422 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | DeepAutoAI/causal_gpt2 | DeepAutoAI/causal_gpt2 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1812767900282362}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.124} |
HF Open LLM v2 | openai | Kimargin/GPT-NEO-1.3B-wiki | 9084d476-dee7-4447-9955-e0f066bd35ba | 0.0.1 | hfopenllm_v2/Kimargin_GPT-NEO-1.3B-wiki/1762652579.6992168 | 1762652579.699218 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Kimargin/GPT-NEO-1.3B-wiki | Kimargin/GPT-NEO-1.3B-wiki | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19206815693471102}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "GPTNeoForCausalLM", "params_billions": 1.316} |
HF Open LLM v2 | openai | yuchenxie/ArlowGPT-3B-Multilingual | fd270937-c889-4a2b-aada-341a44c80d46 | 0.0.1 | hfopenllm_v2/yuchenxie_ArlowGPT-3B-Multilingual/1762652580.611115 | 1762652580.611116 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yuchenxie/ArlowGPT-3B-Multilingual | yuchenxie/ArlowGPT-3B-Multilingual | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6395486198841297}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 3.213} |
HF Open LLM v2 | openai | yuchenxie/ArlowGPT-8B | af890cb6-9d90-41b0-a7a1-c87f3584b93c | 0.0.1 | hfopenllm_v2/yuchenxie_ArlowGPT-8B/1762652580.611377 | 1762652580.611378 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | yuchenxie/ArlowGPT-8B | yuchenxie/ArlowGPT-8B | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7846536079823756}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | openai | langgptai/Qwen-las-v0.1 | cfaa9b4e-8588-45a5-9b9d-4268a71b128b | 0.0.1 | hfopenllm_v2/langgptai_Qwen-las-v0.1/1762652580.313808 | 1762652580.313809 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | langgptai/Qwen-las-v0.1 | langgptai/Qwen-las-v0.1 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.33010412372504955}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 7.901} |
HF Open LLM v2 | openai | microsoft/DialoGPT-medium | 3c70b5d5-784d-41fb-8ca7-eabd6a96a195 | 0.0.1 | hfopenllm_v2/microsoft_DialoGPT-medium/1762652580.353813 | 1762652580.3538141 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | microsoft/DialoGPT-medium | microsoft/DialoGPT-medium | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.14790422744983311}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPT2LMHeadModel", "params_billions": 0.345} |
HF Open LLM v2 | openai | universalml/NepaliGPT-2.0 | 07a71559-e618-4ba7-8721-bc6834f1c727 | 0.0.1 | hfopenllm_v2/universalml_NepaliGPT-2.0/1762652580.578092 | 1762652580.578093 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | universalml/NepaliGPT-2.0 | universalml/NepaliGPT-2.0 | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.03649538779327739}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | openai | EleutherAI/gpt-neo-1.3B | dc615b98-9255-4a6e-afe2-c79d59362520 | 0.0.1 | hfopenllm_v2/EleutherAI_gpt-neo-1.3B/1762652579.59305 | 1762652579.59305 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EleutherAI/gpt-neo-1.3B | EleutherAI/gpt-neo-1.3B | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20790502533278366}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPTNeoForCausalLM", "params_billions": 1.366} |
HF Open LLM v2 | openai | EleutherAI/gpt-j-6b | 1f140f2a-c9cb-49fb-8bcd-e59f699fd12a | 0.0.1 | hfopenllm_v2/EleutherAI_gpt-j-6b/1762652579.5928068 | 1762652579.592808 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EleutherAI/gpt-j-6b | EleutherAI/gpt-j-6b | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2522185578708937}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPTJForCausalLM", "params_billions": 6.0} |
HF Open LLM v2 | openai | EleutherAI/gpt-neox-20b | 0da6366b-b997-411e-ac76-c25b061e13f8 | 0.0.1 | hfopenllm_v2/EleutherAI_gpt-neox-20b/1762652579.5937028 | 1762652579.593704 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EleutherAI/gpt-neox-20b | EleutherAI/gpt-neox-20b | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2586880587951081}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "GPTNeoXForCausalLM", "params_billions": 20.739} |
HF Open LLM v2 | openai | EleutherAI/gpt-neo-125m | cff09938-5918-4825-b974-194019b48165 | 0.0.1 | hfopenllm_v2/EleutherAI_gpt-neo-125m/1762652579.593268 | 1762652579.593268 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EleutherAI/gpt-neo-125m | EleutherAI/gpt-neo-125m | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19054442213327305}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "GPTNeoForCausalLM", "params_billions": 0.15} |
HF Open LLM v2 | openai | EleutherAI/gpt-neo-2.7B | 6ebf0016-f747-4ccd-82fa-db427733b2f9 | 0.0.1 | hfopenllm_v2/EleutherAI_gpt-neo-2.7B/1762652579.5934908 | 1762652579.5934908 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | EleutherAI/gpt-neo-2.7B | EleutherAI/gpt-neo-2.7B | openai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2589628851447493}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "GPTNeoForCausalLM", "params_billions": 2.718} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Casual-Magnum-34B | 0b9358f8-1e27-448f-9932-1f2c6feac036 | 0.0.1 | hfopenllm_v2/Jacoby746_Casual-Magnum-34B/1762652579.65033 | 1762652579.6503308 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Casual-Magnum-34B | Jacoby746/Casual-Magnum-34B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19301675110927893}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Proto-Harpy-Blazing-Light-v0.1-2x7B | f7455f30-e04e-4bc6-9d71-e33272d4577c | 0.0.1 | hfopenllm_v2/Jacoby746_Proto-Harpy-Blazing-Light-v0.1-2x7B/1762652579.651509 | 1762652579.65151 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Proto-Harpy-Blazing-Light-v0.1-2x7B | Jacoby746/Proto-Harpy-Blazing-Light-v0.1-2x7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.4904719477652628}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 12.879} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Inf-Silent-Kunoichi-v0.2-2x7B | f611991b-11c1-4232-bc63-8cf2942605ae | 0.0.1 | hfopenllm_v2/Jacoby746_Inf-Silent-Kunoichi-v0.2-2x7B/1762652579.650832 | 1762652579.650833 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Inf-Silent-Kunoichi-v0.2-2x7B | Jacoby746/Inf-Silent-Kunoichi-v0.2-2x7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.3636019095998617}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 12.879} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Inf-Silent-Kunoichi-v0.1-2x7B | d1fa6abf-be2b-4ea6-bcbe-066ac37aa54f | 0.0.1 | hfopenllm_v2/Jacoby746_Inf-Silent-Kunoichi-v0.1-2x7B/1762652579.6505952 | 1762652579.6505961 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Inf-Silent-Kunoichi-v0.1-2x7B | Jacoby746/Inf-Silent-Kunoichi-v0.1-2x7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.38798166642286913}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 12.879} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Proto-Athena-v0.2-4x7B | 060feab1-4ce6-44a9-8ae2-c06468dd4dc9 | 0.0.1 | hfopenllm_v2/Jacoby746_Proto-Athena-v0.2-4x7B/1762652579.651291 | 1762652579.6512918 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Proto-Athena-v0.2-4x7B | Jacoby746/Proto-Athena-v0.2-4x7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37524213531208306}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 24.154} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Proto-Athena-4x7B | 27d9d5c2-39d8-45e5-9614-a343144f05d8 | 0.0.1 | hfopenllm_v2/Jacoby746_Proto-Athena-4x7B/1762652579.651071 | 1762652579.651072 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Proto-Athena-4x7B | Jacoby746/Proto-Athena-4x7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.37029636918930664}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MixtralForCausalLM", "params_billions": 24.154} |
HF Open LLM v2 | Jacoby746 | Jacoby746/Proto-Harpy-Spark-v0.1-7B | 420cf07c-f043-49db-a62d-91e0c21aff2f | 0.0.1 | hfopenllm_v2/Jacoby746_Proto-Harpy-Spark-v0.1-7B/1762652579.651721 | 1762652579.651722 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Jacoby746/Proto-Harpy-Spark-v0.1-7B | Jacoby746/Proto-Harpy-Spark-v0.1-7B | Jacoby746 | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.43326928106313467}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | NTQAI | NTQAI/Nxcode-CQ-7B-orpo | 1c020e50-fe68-40c9-a36a-7bec201f409a | 0.0.1 | hfopenllm_v2/NTQAI_Nxcode-CQ-7B-orpo/1762652579.769034 | 1762652579.769035 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NTQAI/Nxcode-CQ-7B-orpo | NTQAI/Nxcode-CQ-7B-orpo | NTQAI | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.40072119753365515}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.25} |
HF Open LLM v2 | NTQAI | NTQAI/NxMobileLM-1.5B-SFT | 7a295af9-fb47-484f-8748-af3ee245d2c5 | 0.0.1 | hfopenllm_v2/NTQAI_NxMobileLM-1.5B-SFT/1762652579.768717 | 1762652579.768718 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NTQAI/NxMobileLM-1.5B-SFT | NTQAI/NxMobileLM-1.5B-SFT | NTQAI | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6392239258500778}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | fluently-sets | fluently-sets/FalconThink3-10B-IT | 9329922e-7594-497d-bfab-9c8a18300dc9 | 0.0.1 | hfopenllm_v2/fluently-sets_FalconThink3-10B-IT/1762652580.1573172 | 1762652580.1573179 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | fluently-sets/FalconThink3-10B-IT | fluently-sets/FalconThink3-10B-IT | fluently-sets | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7326216660682544}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | fluently-sets | fluently-sets/reasoning-1-1k-demo | c63fc7e4-87ae-4516-ad3d-df95693133d5 | 0.0.1 | hfopenllm_v2/fluently-sets_reasoning-1-1k-demo/1762652580.157624 | 1762652580.1576252 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | fluently-sets/reasoning-1-1k-demo | fluently-sets/reasoning-1-1k-demo | fluently-sets | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7524800861713586}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | abacusai | abacusai/bigyi-15b | 19b4d65c-39c7-4b81-bb71-f166ab4f9490 | 0.0.1 | hfopenllm_v2/abacusai_bigyi-15b/1762652579.972117 | 1762652579.972117 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/bigyi-15b | abacusai/bigyi-15b | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20940327220663396}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "LlamaForCausalLM", "params_billions": 15.058} |
HF Open LLM v2 | abacusai | abacusai/Smaug-Llama-3-70B-Instruct-32K | 962b4977-63f0-4a87-a36e-f3e592b74761 | 0.0.1 | hfopenllm_v2/abacusai_Smaug-Llama-3-70B-Instruct-32K/1762652579.971162 | 1762652579.9711628 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Smaug-Llama-3-70B-Instruct-32K | abacusai/Smaug-Llama-3-70B-Instruct-32K | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7761107195574409}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 70.554} |
HF Open LLM v2 | abacusai | abacusai/Dracarys-72B-Instruct | 2f1e6f4e-86e6-47a4-96e6-3bc2b330cd3a | 0.0.1 | hfopenllm_v2/abacusai_Dracarys-72B-Instruct/1762652579.969532 | 1762652579.969532 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Dracarys-72B-Instruct | abacusai/Dracarys-72B-Instruct | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7855778224001206}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | abacusai | abacusai/Smaug-Qwen2-72B-Instruct | 84695a6b-dc11-448c-bbeb-b3cc05cde7ba | 0.0.1 | hfopenllm_v2/abacusai_Smaug-Qwen2-72B-Instruct/1762652579.9716392 | 1762652579.97164 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Smaug-Qwen2-72B-Instruct | abacusai/Smaug-Qwen2-72B-Instruct | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7825303527972447}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 72.706} |
HF Open LLM v2 | abacusai | abacusai/Smaug-34B-v0.1 | e0b9044d-1b87-44f7-b59b-88d790f429e5 | 0.0.1 | hfopenllm_v2/abacusai_Smaug-34B-v0.1/1762652579.970392 | 1762652579.9703932 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Smaug-34B-v0.1 | abacusai/Smaug-34B-v0.1 | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5015625207782018}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 34.389} |
HF Open LLM v2 | abacusai | abacusai/Smaug-72B-v0.1 | a3b08cd3-6ead-4db0-92ed-212c6b0e45ee | 0.0.1 | hfopenllm_v2/abacusai_Smaug-72B-v0.1/1762652579.970887 | 1762652579.9708889 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Smaug-72B-v0.1 | abacusai/Smaug-72B-v0.1 | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5167001334237601}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 72.289} |
HF Open LLM v2 | abacusai | abacusai/Smaug-Mixtral-v0.1 | ba0fe822-7a57-4ccb-a97e-e852a59d9ae1 | 0.0.1 | hfopenllm_v2/abacusai_Smaug-Mixtral-v0.1/1762652579.971408 | 1762652579.9714088 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/Smaug-Mixtral-v0.1 | abacusai/Smaug-Mixtral-v0.1 | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5554428915278129}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | abacusai | abacusai/bigstral-12b-32k | aed1ac03-5364-477e-ab8f-68b599170128 | 0.0.1 | hfopenllm_v2/abacusai_bigstral-12b-32k/1762652579.971883 | 1762652579.971884 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | abacusai/bigstral-12b-32k | abacusai/bigstral-12b-32k | abacusai | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.41938057686937324}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.476} |
HF Open LLM v2 | FuJhen | FuJhen/ft-openhermes-25-mistral-7b-irca-dpo-pairs | bfaec047-518f-42a0-93a1-c6bda3589c26 | 0.0.1 | hfopenllm_v2/FuJhen_ft-openhermes-25-mistral-7b-irca-dpo-pairs/1762652579.624908 | 1762652579.6249092 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | FuJhen/ft-openhermes-25-mistral-7b-irca-dpo-pairs | FuJhen/ft-openhermes-25-mistral-7b-irca-dpo-pairs | FuJhen | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5420041046645123}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "?", "params_billions": 14.483} |
HF Open LLM v2 | FuJhen | FuJhen/mistral-instruct-7B-DPO | 5f79d177-3ca8-4c95-83bb-2abb0e803e72 | 0.0.1 | hfopenllm_v2/FuJhen_mistral-instruct-7B-DPO/1762652579.625171 | 1762652579.625172 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | FuJhen/mistral-instruct-7B-DPO | FuJhen/mistral-instruct-7B-DPO | FuJhen | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.49684171332065585}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "?", "params_billions": 14.496} |
HF Open LLM v2 | agentlans | agentlans/Llama-3.2-1B-Instruct-CrashCourse12K | fbedd898-b839-49c1-bd6d-3a8744d4138a | 0.0.1 | hfopenllm_v2/agentlans_Llama-3.2-1B-Instruct-CrashCourse12K/1762652579.976028 | 1762652579.976029 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama-3.2-1B-Instruct-CrashCourse12K | agentlans/Llama-3.2-1B-Instruct-CrashCourse12K | agentlans | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5395062877609188}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 1.236} |
HF Open LLM v2 | agentlans | agentlans/Llama3.1-Daredevilish-Instruct | 7a6d7a66-5772-4793-9597-ef0225b63f30 | 0.0.1 | hfopenllm_v2/agentlans_Llama3.1-Daredevilish-Instruct/1762652579.9768262 | 1762652579.976827 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Llama3.1-Daredevilish-Instruct | agentlans/Llama3.1-Daredevilish-Instruct | agentlans | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7925969760236173}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | agentlans | agentlans/Qwen2.5-0.5B-Instruct-CrashCourse-dropout | ad130d6f-6a5e-447a-a1ee-bfa2d93e5336 | 0.0.1 | hfopenllm_v2/agentlans_Qwen2.5-0.5B-Instruct-CrashCourse-dropout/1762652579.9778361 | 1762652579.977837 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | agentlans/Qwen2.5-0.5B-Instruct-CrashCourse-dropout | agentlans/Qwen2.5-0.5B-Instruct-CrashCourse-dropout | agentlans | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2948831323111566}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Svak | Svak/MN-12B-Inferor-v0.0 | 5bb52ed5-e59a-4e60-a6eb-9e9322d95ccc | 0.0.1 | hfopenllm_v2/Svak_MN-12B-Inferor-v0.0/1762652579.896373 | 1762652579.896374 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Svak/MN-12B-Inferor-v0.0 | Svak/MN-12B-Inferor-v0.0 | Svak | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5707555951541909}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Svak | Svak/MN-12B-Inferor-v0.1 | 9bfe838e-a568-4933-b03d-3e9ae6d2026d | 0.0.1 | hfopenllm_v2/Svak_MN-12B-Inferor-v0.1/1762652579.8966348 | 1762652579.896636 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Svak/MN-12B-Inferor-v0.1 | Svak/MN-12B-Inferor-v0.1 | Svak | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.6346527214457639}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "float16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | tensopolis | tensopolis/lamarckvergence-14b-tensopolis-v1 | da94039c-b214-4ad0-a312-a38cea28498b | 0.0.1 | hfopenllm_v2/tensopolis_lamarckvergence-14b-tensopolis-v1/1762652580.555553 | 1762652580.5555542 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/lamarckvergence-14b-tensopolis-v1 | tensopolis/lamarckvergence-14b-tensopolis-v1 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7603735865281896}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | tensopolis | tensopolis/virtuoso-small-tensopolis-v1 | 2228ade6-6243-423f-857e-66f5584a1511 | 0.0.1 | hfopenllm_v2/tensopolis_virtuoso-small-tensopolis-v1/1762652580.5582058 | 1762652580.558207 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/virtuoso-small-tensopolis-v1 | tensopolis/virtuoso-small-tensopolis-v1 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7856276900845313}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | tensopolis | tensopolis/falcon3-10b-tensopolis-v1 | d59c7d7c-99a9-4de5-9a69-60b934eafa1b | 0.0.1 | hfopenllm_v2/tensopolis_falcon3-10b-tensopolis-v1/1762652580.555104 | 1762652580.555105 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/falcon3-10b-tensopolis-v1 | tensopolis/falcon3-10b-tensopolis-v1 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7816560060639104}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | tensopolis | tensopolis/falcon3-10b-tensopolis-v2 | ce5dfe15-432b-42ac-9ef1-569ab4e640a6 | 0.0.1 | hfopenllm_v2/tensopolis_falcon3-10b-tensopolis-v2/1762652580.555352 | 1762652580.5553532 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/falcon3-10b-tensopolis-v2 | tensopolis/falcon3-10b-tensopolis-v2 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.7792080568447275}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | tensopolis | tensopolis/virtuoso-lite-tensopolis-v2 | 9024dcc9-fbd0-4ab0-9142-cbf741e7ae54 | 0.0.1 | hfopenllm_v2/tensopolis_virtuoso-lite-tensopolis-v2/1762652580.5578399 | 1762652580.5578408 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/virtuoso-lite-tensopolis-v2 | tensopolis/virtuoso-lite-tensopolis-v2 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8029384255996312}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | tensopolis | tensopolis/virtuoso-lite-tensopolis-v1 | 574e1e63-46f3-44a4-8d04-ad1709a7e1dd | 0.0.1 | hfopenllm_v2/tensopolis_virtuoso-lite-tensopolis-v1/1762652580.557624 | 1762652580.557625 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/virtuoso-lite-tensopolis-v1 | tensopolis/virtuoso-lite-tensopolis-v1 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.806910109620252}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.306} |
HF Open LLM v2 | tensopolis | tensopolis/virtuoso-small-v2-tensopolis-v1 | 727869c4-3498-482a-a04e-c6a779c0e558 | 0.0.1 | hfopenllm_v2/tensopolis_virtuoso-small-v2-tensopolis-v1/1762652580.558718 | 1762652580.558719 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/virtuoso-small-v2-tensopolis-v1 | tensopolis/virtuoso-small-v2-tensopolis-v1 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8419061423689145}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.766} |
HF Open LLM v2 | tensopolis | tensopolis/virtuoso-small-tensopolis-v2 | c5c34d42-c043-4d60-80bf-5cb522e9d915 | 0.0.1 | hfopenllm_v2/tensopolis_virtuoso-small-tensopolis-v2/1762652580.5584881 | 1762652580.558489 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | tensopolis/virtuoso-small-tensopolis-v2 | tensopolis/virtuoso-small-tensopolis-v2 | tensopolis | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.8020142111818863}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 14.77} |
HF Open LLM v2 | ymcki | ymcki/Llama-3.1-8B-SFT-GRPO-Instruct | 938af657-ca9b-4400-84e1-002065f92f84 | 0.0.1 | hfopenllm_v2/ymcki_Llama-3.1-8B-SFT-GRPO-Instruct/1762652580.608792 | 1762652580.608793 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ymcki/Llama-3.1-8B-SFT-GRPO-Instruct | ymcki/Llama-3.1-8B-SFT-GRPO-Instruct | ymcki | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.33540007180946557}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | ymcki | ymcki/Llama-3.1-8B-GRPO-Instruct | cb38b3bb-6188-430f-b863-9bf86cc877f9 | 0.0.1 | hfopenllm_v2/ymcki_Llama-3.1-8B-GRPO-Instruct/1762652580.608475 | 1762652580.608476 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | ymcki/Llama-3.1-8B-GRPO-Instruct | ymcki/Llama-3.1-8B-GRPO-Instruct | ymcki | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.744536718130117}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH"... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Luni | Luni/StarDust-12b-v1 | fa64b745-6b4b-4fee-b77e-d744e54a17d6 | 0.0.1 | hfopenllm_v2/Luni_StarDust-12b-v1/1762652579.736537 | 1762652579.7365382 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Luni/StarDust-12b-v1 | Luni/StarDust-12b-v1 | Luni | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5459259210007226}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | Luni | Luni/StarDust-12b-v2 | 401f6afc-9a2a-4bfe-87b2-daa6df848424 | 0.0.1 | hfopenllm_v2/Luni_StarDust-12b-v2/1762652579.736784 | 1762652579.736785 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Luni/StarDust-12b-v2 | Luni/StarDust-12b-v2 | Luni | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5628620947973599}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 12.248} |
HF Open LLM v2 | NousResearch | NousResearch/Yarn-Solar-10b-32k | a18a259d-1795-4848-94fd-3b9c3abfb9da | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Solar-10b-32k/1762652579.793437 | 1762652579.793438 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Solar-10b-32k | NousResearch/Yarn-Solar-10b-32k | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.19421579187666504}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.0} |
HF Open LLM v2 | NousResearch | NousResearch/Nous-Hermes-2-SOLAR-10.7B | 80a7b60b-77f7-4dbf-96c8-071c56179fec | 0.0.1 | hfopenllm_v2/NousResearch_Nous-Hermes-2-SOLAR-10.7B/1762652579.791853 | 1762652579.7918541 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Nous-Hermes-2-SOLAR-10.7B | NousResearch/Nous-Hermes-2-SOLAR-10.7B | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5278660620486975}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.732} |
HF Open LLM v2 | NousResearch | NousResearch/Yarn-Solar-10b-64k | 1904c811-34ae-4f52-9978-622bc6dd6f2e | 0.0.1 | hfopenllm_v2/NousResearch_Yarn-Solar-10b-64k/1762652579.793644 | 1762652579.7936451 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Yarn-Solar-10b-64k | NousResearch/Yarn-Solar-10b-64k | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1988867316498003}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 10.0} |
HF Open LLM v2 | NousResearch | NousResearch/Nous-Hermes-2-Mistral-7B-DPO | 877421ae-8135-485f-805e-489ed70dc886 | 0.0.1 | hfopenllm_v2/NousResearch_Nous-Hermes-2-Mistral-7B-DPO/1762652579.7912042 | 1762652579.7912052 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Nous-Hermes-2-Mistral-7B-DPO | NousResearch/Nous-Hermes-2-Mistral-7B-DPO | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5762510139762497}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MistralForCausalLM", "params_billions": 7.242} |
HF Open LLM v2 | NousResearch | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT | 3c196d70-44ad-419c-8c4c-80fc7f184687 | 0.0.1 | hfopenllm_v2/NousResearch_Nous-Hermes-2-Mixtral-8x7B-SFT/1762652579.791643 | 1762652579.7916439 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT | NousResearch/Nous-Hermes-2-Mixtral-8x7B-SFT | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5730783210769648}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | NousResearch | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO | bc2d14fe-000a-40ce-a57c-c00fe584a7e4 | 0.0.1 | hfopenllm_v2/NousResearch_Nous-Hermes-2-Mixtral-8x7B-DPO/1762652579.791439 | 1762652579.7914398 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO | NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO | NousResearch | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5896898008395501}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "MixtralForCausalLM", "params_billions": 46.703} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V1 | 70577ab1-a0ef-41f3-8d6a-00b0b873ee39 | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-YL-1B-SLERP-V1/1762652579.962892 | 1762652579.962893 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V1 | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V1 | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32510848991786234}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | Youlln | Youlln/ECE-MIRAGE-1-12B | f3f55015-88c7-41ae-b588-9a1eedd56fc2 | 0.0.1 | hfopenllm_v2/Youlln_ECE-MIRAGE-1-12B/1762652579.96142 | 1762652579.96142 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-MIRAGE-1-12B | Youlln/ECE-MIRAGE-1-12B | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20698081091503875}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 15.21} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL0.5-FT | 4264c0fc-9f40-4c27-b877-63a751678a1c | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL0.5-FT/1762652579.963541 | 1762652579.963541 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL0.5-FT | Youlln/ECE-PRYMMAL0.5-FT | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.18507338306803725}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V2 | 6021f954-951a-47e1-980d-ce729f9f39b4 | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-YL-1B-SLERP-V2/1762652579.963118 | 1762652579.963118 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V2 | Youlln/ECE-PRYMMAL-YL-1B-SLERP-V2 | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.32510848991786234}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 1.544} |
HF Open LLM v2 | Youlln | Youlln/1PARAMMYL-8B-ModelStock | 87231cbd-d911-434d-991b-1eb373cdde4f | 0.0.1 | hfopenllm_v2/Youlln_1PARAMMYL-8B-ModelStock/1762652579.9604638 | 1762652579.960465 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/1PARAMMYL-8B-ModelStock | Youlln/1PARAMMYL-8B-ModelStock | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.5371336941537344}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "LlamaForCausalLM", "params_billions": 8.03} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-0.5B-FT-V4-MUSR | 68382b86-8a68-428e-8338-144a76b8c293 | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-0.5B-FT-V4-MUSR/1762652579.9622452 | 1762652579.962246 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-0.5B-FT-V4-MUSR | Youlln/ECE-PRYMMAL-0.5B-FT-V4-MUSR | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.1137570535069172}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-0.5B-FT-V3 | 45c46c5d-cf81-42d4-bf9e-61aca49b2959 | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-0.5B-FT-V3/1762652579.9618208 | 1762652579.9618208 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-0.5B-FT-V3 | Youlln/ECE-PRYMMAL-0.5B-FT-V3 | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.16419101317836673}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-YL-7B-SLERP-V4 | e027a39b-1213-42aa-b66f-b1853c644532 | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-YL-7B-SLERP-V4/1762652579.963329 | 1762652579.963329 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-YL-7B-SLERP-V4 | Youlln/ECE-PRYMMAL-YL-7B-SLERP-V4 | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2509696494190969}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 7.616} |
HF Open LLM v2 | Youlln | Youlln/ECE-PRYMMAL-0.5B-FT-V3-MUSR | de30a84d-c8cc-4f3c-9eb4-3f58754dc46b | 0.0.1 | hfopenllm_v2/Youlln_ECE-PRYMMAL-0.5B-FT-V3-MUSR/1762652579.962029 | 1762652579.962029 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-PRYMMAL-0.5B-FT-V3-MUSR | Youlln/ECE-PRYMMAL-0.5B-FT-V3-MUSR | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.15334977858748122}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.494} |
HF Open LLM v2 | Youlln | Youlln/ECE-MIRAGE-1-15B | f904e587-76ac-4583-9235-fcdd20d9a626 | 0.0.1 | hfopenllm_v2/Youlln_ECE-MIRAGE-1-15B/1762652579.961622 | 1762652579.961622 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE-MIRAGE-1-15B | Youlln/ECE-MIRAGE-1-15B | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.20698081091503875}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BB... | {"precision": "float16", "architecture": "Qwen2ForCausalLM", "params_billions": 15.21} |
HF Open LLM v2 | Youlln | Youlln/ECE.EIFFEIL.ia-0.5B-SLERP | 7a5fdffa-146b-43fd-a979-728c37ae599f | 0.0.1 | hfopenllm_v2/Youlln_ECE.EIFFEIL.ia-0.5B-SLERP/1762652579.964375 | 1762652579.964375 | ["https://open-llm-leaderboard-open-llm-leaderboard.hf.space/api/leaderboard/formatted"] | HF Open LLM v2 | leaderboard | Hugging Face | null | null | third_party | Youlln/ECE.EIFFEIL.ia-0.5B-SLERP | Youlln/ECE.EIFFEIL.ia-0.5B-SLERP | Youlln | unknown | [{"evaluation_name": "IFEval", "metric_config": {"evaluation_description": "Accuracy on IFEval", "lower_is_better": false, "score_type": "continuous", "min_score": 0, "max_score": 1}, "score_details": {"score": 0.2561403742071038}}, {"evaluation_name": "BBH", "metric_config": {"evaluation_description": "Accuracy on BBH... | {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM", "params_billions": 0.63} |
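Each row above stores its per-benchmark results as a JSON-encoded list in the `evaluation_results` column (one entry per benchmark, with a `metric_config` and a `score_details.score`), and its model metadata as JSON in `additional_details`. Below is a minimal sketch of how one might decode that payload with the standard `json` module; the `row` literal is a hypothetical stand-in assembled from values visible in the table above, not an actual loader for this dataset.

```python
import json

# Hypothetical stand-in for one record of this dataset; field names and
# the nested structure match the schema shown in the rows above.
row = {
    "_model": "tensopolis/virtuoso-small-v2-tensopolis-v1",
    "evaluation_results": json.dumps([
        {
            "evaluation_name": "IFEval",
            "metric_config": {
                "evaluation_description": "Accuracy on IFEval",
                "lower_is_better": False,
                "score_type": "continuous",
                "min_score": 0,
                "max_score": 1,
            },
            "score_details": {"score": 0.8419061423689145},
        }
    ]),
    "additional_details": json.dumps(
        {"precision": "bfloat16", "architecture": "Qwen2ForCausalLM",
         "params_billions": 14.766}
    ),
}

# Decode the JSON payloads and index benchmark scores by name.
scores = {
    result["evaluation_name"]: result["score_details"]["score"]
    for result in json.loads(row["evaluation_results"])
}
details = json.loads(row["additional_details"])

print(row["_model"], scores.get("IFEval"), details["params_billions"])
# -> tensopolis/virtuoso-small-v2-tensopolis-v1 0.8419061423689145 14.766
```

The same decoding step should apply to rows loaded in bulk (for example via `datasets.load_dataset`, assuming this dump is hosted as a standard HF dataset), since the nested fields are serialized strings rather than native columns.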