- commonsense:dataset=openbookqa,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- groups
- gsm:model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- legalbench:subset=abercrombie,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- legalbench:subset=corporate_lobbying,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- legalbench:subset=function_of_decision_section,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- legalbench:subset=international_citizenship_questions,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- legalbench:subset=proa,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- mmlu:subject=abstract_algebra,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- mmlu:subject=college_chemistry,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- mmlu:subject=computer_security,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- mmlu:subject=econometrics,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
- mmlu:subject=us_foreign_policy,method=multiple_choice_joint,model=HuggingFaceTB_SmolLM2-360M-intermediate-checkpoints
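
The entries above follow the HELM run-spec syntax (`scenario:arg=value,...`); the underscore in the model name presumably stands in for the `/` of the Hub identifier `HuggingFaceTB/SmolLM2-360M-intermediate-checkpoints`, since HELM replaces path separators when naming run directories. As a minimal sketch, one of these specs could be re-run with the `crfm-helm` CLI; the flag names (`--run-entries`, `--suite`, `--max-eval-instances`) and the suite name below are assumptions based on recent crfm-helm releases, not something this listing confirms:

```python
# Sketch: invoking helm-run for one of the listed run specs.
# Assumes `pip install crfm-helm`; flag names follow recent crfm-helm
# releases and may differ in the version used to produce this listing.
import subprocess

run_entry = (
    "mmlu:subject=abstract_algebra,method=multiple_choice_joint,"
    "model=HuggingFaceTB/SmolLM2-360M-intermediate-checkpoints"  # "/" form assumed
)

subprocess.run(
    [
        "helm-run",
        "--run-entries", run_entry,
        "--suite", "smollm2-checkpoints",  # hypothetical suite name
        "--max-eval-instances", "10",      # small cap to keep the sketch cheap
    ],
    check=True,
)
```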