{ "results": { "mmlu": { "acc,none": 0.3726677111522575, "acc_stderr,none": 0.0040496674485089226, "alias": "mmlu" }, "mmlu_humanities": { "acc,none": 0.34537725823591925, "acc_stderr,none": 0.006847247938989206, "alias": " - humanities" }, "mmlu_formal_logic": { "alias": " - formal_logic", "acc,none": 0.31746031746031744, "acc_stderr,none": 0.04163453031302859 }, "mmlu_high_school_european_history": { "alias": " - high_school_european_history", "acc,none": 0.47878787878787876, "acc_stderr,none": 0.03900828913737302 }, "mmlu_high_school_us_history": { "alias": " - high_school_us_history", "acc,none": 0.46078431372549017, "acc_stderr,none": 0.03498501649369527 }, "mmlu_high_school_world_history": { "alias": " - high_school_world_history", "acc,none": 0.5274261603375527, "acc_stderr,none": 0.03249822718301303 }, "mmlu_international_law": { "alias": " - international_law", "acc,none": 0.5537190082644629, "acc_stderr,none": 0.0453793517794788 }, "mmlu_jurisprudence": { "alias": " - jurisprudence", "acc,none": 0.3425925925925926, "acc_stderr,none": 0.045879047413018105 }, "mmlu_logical_fallacies": { "alias": " - logical_fallacies", "acc,none": 0.39263803680981596, "acc_stderr,none": 0.0383674090783103 }, "mmlu_moral_disputes": { "alias": " - moral_disputes", "acc,none": 0.36416184971098264, "acc_stderr,none": 0.025906632631016117 }, "mmlu_moral_scenarios": { "alias": " - moral_scenarios", "acc,none": 0.26256983240223464, "acc_stderr,none": 0.014716824273017752 }, "mmlu_philosophy": { "alias": " - philosophy", "acc,none": 0.33762057877813506, "acc_stderr,none": 0.026858825879488533 }, "mmlu_prehistory": { "alias": " - prehistory", "acc,none": 0.3950617283950617, "acc_stderr,none": 0.02720111766692565 }, "mmlu_professional_law": { "alias": " - professional_law", "acc,none": 0.30964797913950454, "acc_stderr,none": 0.011808598262503318 }, "mmlu_world_religions": { "alias": " - world_religions", "acc,none": 0.29239766081871343, "acc_stderr,none": 0.03488647713457922 }, "mmlu_other": { "acc,none": 0.3855809462504023, "acc_stderr,none": 0.008716859594007323, "alias": " - other" }, "mmlu_business_ethics": { "alias": " - business_ethics", "acc,none": 0.36, "acc_stderr,none": 0.048241815132442176 }, "mmlu_clinical_knowledge": { "alias": " - clinical_knowledge", "acc,none": 0.37735849056603776, "acc_stderr,none": 0.029832808114796005 }, "mmlu_college_medicine": { "alias": " - college_medicine", "acc,none": 0.35260115606936415, "acc_stderr,none": 0.036430371689585496 }, "mmlu_global_facts": { "alias": " - global_facts", "acc,none": 0.23, "acc_stderr,none": 0.04229525846816505 }, "mmlu_human_aging": { "alias": " - human_aging", "acc,none": 0.38565022421524664, "acc_stderr,none": 0.03266842214289201 }, "mmlu_management": { "alias": " - management", "acc,none": 0.39805825242718446, "acc_stderr,none": 0.04846748253977239 }, "mmlu_marketing": { "alias": " - marketing", "acc,none": 0.4444444444444444, "acc_stderr,none": 0.03255326307272485 }, "mmlu_medical_genetics": { "alias": " - medical_genetics", "acc,none": 0.4, "acc_stderr,none": 0.049236596391733084 }, "mmlu_miscellaneous": { "alias": " - miscellaneous", "acc,none": 0.3831417624521073, "acc_stderr,none": 0.01738477419488562 }, "mmlu_nutrition": { "alias": " - nutrition", "acc,none": 0.40522875816993464, "acc_stderr,none": 0.02811092849280908 }, "mmlu_professional_accounting": { "alias": " - professional_accounting", "acc,none": 0.3546099290780142, "acc_stderr,none": 0.02853865002887864 }, "mmlu_professional_medicine": { "alias": " - 
professional_medicine", "acc,none": 0.45955882352941174, "acc_stderr,none": 0.03027332507734575 }, "mmlu_virology": { "alias": " - virology", "acc,none": 0.3493975903614458, "acc_stderr,none": 0.0371172519074075 }, "mmlu_social_sciences": { "acc,none": 0.40038999025024374, "acc_stderr,none": 0.008797293522352105, "alias": " - social sciences" }, "mmlu_econometrics": { "alias": " - econometrics", "acc,none": 0.32456140350877194, "acc_stderr,none": 0.04404556157374768 }, "mmlu_high_school_geography": { "alias": " - high_school_geography", "acc,none": 0.45454545454545453, "acc_stderr,none": 0.035476014940069384 }, "mmlu_high_school_government_and_politics": { "alias": " - high_school_government_and_politics", "acc,none": 0.45077720207253885, "acc_stderr,none": 0.03590910952235525 }, "mmlu_high_school_macroeconomics": { "alias": " - high_school_macroeconomics", "acc,none": 0.36923076923076925, "acc_stderr,none": 0.02446861524147892 }, "mmlu_high_school_microeconomics": { "alias": " - high_school_microeconomics", "acc,none": 0.36134453781512604, "acc_stderr,none": 0.031204691225150023 }, "mmlu_high_school_psychology": { "alias": " - high_school_psychology", "acc,none": 0.48990825688073397, "acc_stderr,none": 0.02143295620345332 }, "mmlu_human_sexuality": { "alias": " - human_sexuality", "acc,none": 0.35877862595419846, "acc_stderr,none": 0.04206739313864908 }, "mmlu_professional_psychology": { "alias": " - professional_psychology", "acc,none": 0.3480392156862745, "acc_stderr,none": 0.019270998708223977 }, "mmlu_public_relations": { "alias": " - public_relations", "acc,none": 0.38181818181818183, "acc_stderr,none": 0.04653429807913508 }, "mmlu_security_studies": { "alias": " - security_studies", "acc,none": 0.40816326530612246, "acc_stderr,none": 0.03146465712827424 }, "mmlu_sociology": { "alias": " - sociology", "acc,none": 0.3880597014925373, "acc_stderr,none": 0.03445789964362749 }, "mmlu_us_foreign_policy": { "alias": " - us_foreign_policy", "acc,none": 0.41, "acc_stderr,none": 0.049431107042371025 }, "mmlu_stem": { "acc,none": 0.37361243260386934, "acc_stderr,none": 0.008566808416312282, "alias": " - stem" }, "mmlu_abstract_algebra": { "alias": " - abstract_algebra", "acc,none": 0.26, "acc_stderr,none": 0.04408440022768078 }, "mmlu_anatomy": { "alias": " - anatomy", "acc,none": 0.37037037037037035, "acc_stderr,none": 0.04171654161354543 }, "mmlu_astronomy": { "alias": " - astronomy", "acc,none": 0.4605263157894737, "acc_stderr,none": 0.04056242252249034 }, "mmlu_college_biology": { "alias": " - college_biology", "acc,none": 0.4305555555555556, "acc_stderr,none": 0.04140685639111503 }, "mmlu_college_chemistry": { "alias": " - college_chemistry", "acc,none": 0.37, "acc_stderr,none": 0.048523658709391 }, "mmlu_college_computer_science": { "alias": " - college_computer_science", "acc,none": 0.46, "acc_stderr,none": 0.05009082659620333 }, "mmlu_college_mathematics": { "alias": " - college_mathematics", "acc,none": 0.37, "acc_stderr,none": 0.048523658709391 }, "mmlu_college_physics": { "alias": " - college_physics", "acc,none": 0.22549019607843138, "acc_stderr,none": 0.041583075330832865 }, "mmlu_computer_security": { "alias": " - computer_security", "acc,none": 0.43, "acc_stderr,none": 0.049756985195624284 }, "mmlu_conceptual_physics": { "alias": " - conceptual_physics", "acc,none": 0.2978723404255319, "acc_stderr,none": 0.029896145682095462 }, "mmlu_electrical_engineering": { "alias": " - electrical_engineering", "acc,none": 0.4, "acc_stderr,none": 0.040824829046386284 }, 
"mmlu_elementary_mathematics": { "alias": " - elementary_mathematics", "acc,none": 0.38095238095238093, "acc_stderr,none": 0.025010749116137595 }, "mmlu_high_school_biology": { "alias": " - high_school_biology", "acc,none": 0.44516129032258067, "acc_stderr,none": 0.028272410186214906 }, "mmlu_high_school_chemistry": { "alias": " - high_school_chemistry", "acc,none": 0.37438423645320196, "acc_stderr,none": 0.03405155380561952 }, "mmlu_high_school_computer_science": { "alias": " - high_school_computer_science", "acc,none": 0.51, "acc_stderr,none": 0.05024183937956912 }, "mmlu_high_school_mathematics": { "alias": " - high_school_mathematics", "acc,none": 0.3148148148148148, "acc_stderr,none": 0.028317533496066475 }, "mmlu_high_school_physics": { "alias": " - high_school_physics", "acc,none": 0.33774834437086093, "acc_stderr,none": 0.03861557546255169 }, "mmlu_high_school_statistics": { "alias": " - high_school_statistics", "acc,none": 0.3333333333333333, "acc_stderr,none": 0.03214952147802749 }, "mmlu_machine_learning": { "alias": " - machine_learning", "acc,none": 0.3482142857142857, "acc_stderr,none": 0.04521829902833586 } }, "groups": { "mmlu": { "acc,none": 0.3726677111522575, "acc_stderr,none": 0.0040496674485089226, "alias": "mmlu" }, "mmlu_humanities": { "acc,none": 0.34537725823591925, "acc_stderr,none": 0.006847247938989206, "alias": " - humanities" }, "mmlu_other": { "acc,none": 0.3855809462504023, "acc_stderr,none": 0.008716859594007323, "alias": " - other" }, "mmlu_social_sciences": { "acc,none": 0.40038999025024374, "acc_stderr,none": 0.008797293522352105, "alias": " - social sciences" }, "mmlu_stem": { "acc,none": 0.37361243260386934, "acc_stderr,none": 0.008566808416312282, "alias": " - stem" } }, "group_subtasks": { "mmlu_humanities": [ "mmlu_formal_logic", "mmlu_high_school_european_history", "mmlu_high_school_us_history", "mmlu_high_school_world_history", "mmlu_international_law", "mmlu_jurisprudence", "mmlu_logical_fallacies", "mmlu_moral_disputes", "mmlu_moral_scenarios", "mmlu_philosophy", "mmlu_prehistory", "mmlu_professional_law", "mmlu_world_religions" ], "mmlu_social_sciences": [ "mmlu_econometrics", "mmlu_high_school_geography", "mmlu_high_school_government_and_politics", "mmlu_high_school_macroeconomics", "mmlu_high_school_microeconomics", "mmlu_high_school_psychology", "mmlu_human_sexuality", "mmlu_professional_psychology", "mmlu_public_relations", "mmlu_security_studies", "mmlu_sociology", "mmlu_us_foreign_policy" ], "mmlu_other": [ "mmlu_business_ethics", "mmlu_clinical_knowledge", "mmlu_college_medicine", "mmlu_global_facts", "mmlu_human_aging", "mmlu_management", "mmlu_marketing", "mmlu_medical_genetics", "mmlu_miscellaneous", "mmlu_nutrition", "mmlu_professional_accounting", "mmlu_professional_medicine", "mmlu_virology" ], "mmlu_stem": [ "mmlu_abstract_algebra", "mmlu_anatomy", "mmlu_astronomy", "mmlu_college_biology", "mmlu_college_chemistry", "mmlu_college_computer_science", "mmlu_college_mathematics", "mmlu_college_physics", "mmlu_computer_security", "mmlu_conceptual_physics", "mmlu_electrical_engineering", "mmlu_elementary_mathematics", "mmlu_high_school_biology", "mmlu_high_school_chemistry", "mmlu_high_school_computer_science", "mmlu_high_school_mathematics", "mmlu_high_school_physics", "mmlu_high_school_statistics", "mmlu_machine_learning" ], "mmlu": [ "mmlu_stem", "mmlu_other", "mmlu_social_sciences", "mmlu_humanities" ] }, "configs": { "mmlu_abstract_algebra": { "task": "mmlu_abstract_algebra", "task_alias": "abstract_algebra", "tag": 
"mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "abstract_algebra", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about abstract algebra.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_anatomy": { "task": "mmlu_anatomy", "task_alias": "anatomy", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "anatomy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about anatomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_astronomy": { "task": "mmlu_astronomy", "task_alias": "astronomy", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "astronomy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about astronomy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_business_ethics": { "task": "mmlu_business_ethics", "task_alias": "business_ethics", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "business_ethics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about business ethics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_clinical_knowledge": { "task": "mmlu_clinical_knowledge", "task_alias": "clinical_knowledge", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "clinical_knowledge", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about clinical knowledge.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_biology": { "task": "mmlu_college_biology", "task_alias": "college_biology", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_biology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_chemistry": { "task": "mmlu_college_chemistry", "task_alias": "college_chemistry", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_chemistry", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_computer_science": { "task": "mmlu_college_computer_science", "task_alias": "college_computer_science", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_computer_science", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_mathematics": { "task": "mmlu_college_mathematics", "task_alias": "college_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_medicine": { "task": "mmlu_college_medicine", "task_alias": "college_medicine", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_medicine", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_college_physics": { "task": "mmlu_college_physics", "task_alias": "college_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "college_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about college physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_computer_security": { "task": "mmlu_computer_security", "task_alias": "computer_security", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "computer_security", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about computer security.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_conceptual_physics": { "task": "mmlu_conceptual_physics", "task_alias": "conceptual_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "conceptual_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about conceptual physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_econometrics": { "task": "mmlu_econometrics", "task_alias": "econometrics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "econometrics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about econometrics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_electrical_engineering": { "task": "mmlu_electrical_engineering", "task_alias": "electrical_engineering", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "electrical_engineering", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about electrical engineering.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_elementary_mathematics": { "task": "mmlu_elementary_mathematics", "task_alias": "elementary_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "elementary_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about elementary mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_formal_logic": { "task": "mmlu_formal_logic", "task_alias": "formal_logic", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "formal_logic", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about formal logic.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_global_facts": { "task": "mmlu_global_facts", "task_alias": "global_facts", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "global_facts", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about global facts.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_biology": { "task": "mmlu_high_school_biology", "task_alias": "high_school_biology", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_biology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school biology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_chemistry": { "task": "mmlu_high_school_chemistry", "task_alias": "high_school_chemistry", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_chemistry", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school chemistry.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_computer_science": { "task": "mmlu_high_school_computer_science", "task_alias": "high_school_computer_science", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_computer_science", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school computer science.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_european_history": { "task": "mmlu_high_school_european_history", "task_alias": "high_school_european_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_european_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school european history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_geography": { "task": "mmlu_high_school_geography", "task_alias": "high_school_geography", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_geography", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school geography.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_government_and_politics": { "task": "mmlu_high_school_government_and_politics", "task_alias": "high_school_government_and_politics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_government_and_politics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school government and politics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_macroeconomics": { "task": "mmlu_high_school_macroeconomics", "task_alias": "high_school_macroeconomics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_macroeconomics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school macroeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_mathematics": { "task": "mmlu_high_school_mathematics", "task_alias": "high_school_mathematics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_mathematics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school mathematics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_microeconomics": { "task": "mmlu_high_school_microeconomics", "task_alias": "high_school_microeconomics", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_microeconomics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school microeconomics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_physics": { "task": "mmlu_high_school_physics", "task_alias": "high_school_physics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_physics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. 
{{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school physics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_psychology": { "task": "mmlu_high_school_psychology", "task_alias": "high_school_psychology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_psychology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_statistics": { "task": "mmlu_high_school_statistics", "task_alias": "high_school_statistics", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_statistics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school statistics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_us_history": { "task": "mmlu_high_school_us_history", "task_alias": "high_school_us_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_us_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school us history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_high_school_world_history": { "task": "mmlu_high_school_world_history", "task_alias": "high_school_world_history", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "high_school_world_history", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about high school world history.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_human_aging": { "task": "mmlu_human_aging", "task_alias": "human_aging", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "human_aging", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human aging.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_human_sexuality": { "task": "mmlu_human_sexuality", "task_alias": "human_sexuality", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "human_sexuality", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about human sexuality.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_international_law": { "task": "mmlu_international_law", "task_alias": "international_law", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "international_law", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about international law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_jurisprudence": { "task": "mmlu_jurisprudence", "task_alias": "jurisprudence", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "jurisprudence", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about jurisprudence.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_logical_fallacies": { "task": "mmlu_logical_fallacies", "task_alias": "logical_fallacies", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "logical_fallacies", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about logical fallacies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_machine_learning": { "task": "mmlu_machine_learning", "task_alias": "machine_learning", "tag": "mmlu_stem_tasks", "dataset_path": "cais/mmlu", "dataset_name": "machine_learning", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about machine learning.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_management": { "task": "mmlu_management", "task_alias": "management", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "management", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about management.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_marketing": { "task": "mmlu_marketing", "task_alias": "marketing", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "marketing", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about marketing.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_medical_genetics": { "task": "mmlu_medical_genetics", "task_alias": "medical_genetics", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "medical_genetics", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about medical genetics.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_miscellaneous": { "task": "mmlu_miscellaneous", "task_alias": "miscellaneous", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "miscellaneous", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about miscellaneous.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_moral_disputes": { "task": "mmlu_moral_disputes", "task_alias": "moral_disputes", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "moral_disputes", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral disputes.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_moral_scenarios": { "task": "mmlu_moral_scenarios", "task_alias": "moral_scenarios", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "moral_scenarios", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about moral scenarios.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_nutrition": { "task": "mmlu_nutrition", "task_alias": "nutrition", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "nutrition", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about nutrition.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_philosophy": { "task": "mmlu_philosophy", "task_alias": "philosophy", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "philosophy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about philosophy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_prehistory": { "task": "mmlu_prehistory", "task_alias": "prehistory", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "prehistory", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about prehistory.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_professional_accounting": { "task": "mmlu_professional_accounting", "task_alias": "professional_accounting", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_accounting", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional accounting.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_professional_law": { "task": "mmlu_professional_law", "task_alias": "professional_law", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_law", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional law.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_professional_medicine": { "task": "mmlu_professional_medicine", "task_alias": "professional_medicine", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_medicine", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional medicine.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_professional_psychology": { "task": "mmlu_professional_psychology", "task_alias": "professional_psychology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "professional_psychology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about professional psychology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_public_relations": { "task": "mmlu_public_relations", "task_alias": "public_relations", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "public_relations", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about public relations.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_security_studies": { "task": "mmlu_security_studies", "task_alias": "security_studies", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "security_studies", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about security studies.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_sociology": { "task": "mmlu_sociology", "task_alias": "sociology", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "sociology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about sociology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_us_foreign_policy": { "task": "mmlu_us_foreign_policy", "task_alias": "us_foreign_policy", "tag": "mmlu_social_sciences_tasks", "dataset_path": "cais/mmlu", "dataset_name": "us_foreign_policy", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about us foreign policy.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_virology": { "task": "mmlu_virology", "task_alias": "virology", "tag": "mmlu_other_tasks", "dataset_path": "cais/mmlu", "dataset_name": "virology", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about virology.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } }, "mmlu_world_religions": { "task": "mmlu_world_religions", "task_alias": "world_religions", "tag": "mmlu_humanities_tasks", "dataset_path": "cais/mmlu", "dataset_name": "world_religions", "test_split": "test", "fewshot_split": "dev", "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. {{choices[3]}}\nAnswer:", "doc_to_target": "answer", "unsafe_code": false, "doc_to_choice": [ "A", "B", "C", "D" ], "description": "The following are multiple choice questions (with answers) about world religions.\n\n", "target_delimiter": " ", "fewshot_delimiter": "\n\n", "fewshot_config": { "sampler": "first_n", "split": "dev", "process_docs": null, "fewshot_indices": null, "samples": null, "doc_to_text": "{{question.strip()}}\nA. {{choices[0]}}\nB. {{choices[1]}}\nC. {{choices[2]}}\nD. 
{{choices[3]}}\nAnswer:", "doc_to_choice": [ "A", "B", "C", "D" ], "doc_to_target": "answer", "gen_prefix": null, "fewshot_delimiter": "\n\n", "target_delimiter": " " }, "num_fewshot": 5, "metric_list": [ { "metric": "acc", "aggregation": "mean", "higher_is_better": true } ], "output_type": "multiple_choice", "repeats": 1, "should_decontaminate": false, "metadata": { "version": 1.0, "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true } } }, "versions": { "mmlu": 2, "mmlu_abstract_algebra": 1.0, "mmlu_anatomy": 1.0, "mmlu_astronomy": 1.0, "mmlu_business_ethics": 1.0, "mmlu_clinical_knowledge": 1.0, "mmlu_college_biology": 1.0, "mmlu_college_chemistry": 1.0, "mmlu_college_computer_science": 1.0, "mmlu_college_mathematics": 1.0, "mmlu_college_medicine": 1.0, "mmlu_college_physics": 1.0, "mmlu_computer_security": 1.0, "mmlu_conceptual_physics": 1.0, "mmlu_econometrics": 1.0, "mmlu_electrical_engineering": 1.0, "mmlu_elementary_mathematics": 1.0, "mmlu_formal_logic": 1.0, "mmlu_global_facts": 1.0, "mmlu_high_school_biology": 1.0, "mmlu_high_school_chemistry": 1.0, "mmlu_high_school_computer_science": 1.0, "mmlu_high_school_european_history": 1.0, "mmlu_high_school_geography": 1.0, "mmlu_high_school_government_and_politics": 1.0, "mmlu_high_school_macroeconomics": 1.0, "mmlu_high_school_mathematics": 1.0, "mmlu_high_school_microeconomics": 1.0, "mmlu_high_school_physics": 1.0, "mmlu_high_school_psychology": 1.0, "mmlu_high_school_statistics": 1.0, "mmlu_high_school_us_history": 1.0, "mmlu_high_school_world_history": 1.0, "mmlu_human_aging": 1.0, "mmlu_human_sexuality": 1.0, "mmlu_humanities": 2, "mmlu_international_law": 1.0, "mmlu_jurisprudence": 1.0, "mmlu_logical_fallacies": 1.0, "mmlu_machine_learning": 1.0, "mmlu_management": 1.0, "mmlu_marketing": 1.0, "mmlu_medical_genetics": 1.0, "mmlu_miscellaneous": 1.0, "mmlu_moral_disputes": 1.0, "mmlu_moral_scenarios": 1.0, "mmlu_nutrition": 1.0, "mmlu_other": 2, "mmlu_philosophy": 1.0, "mmlu_prehistory": 1.0, "mmlu_professional_accounting": 1.0, "mmlu_professional_law": 1.0, "mmlu_professional_medicine": 1.0, "mmlu_professional_psychology": 1.0, "mmlu_public_relations": 1.0, "mmlu_security_studies": 1.0, "mmlu_social_sciences": 2, "mmlu_sociology": 1.0, "mmlu_stem": 2, "mmlu_us_foreign_policy": 1.0, "mmlu_virology": 1.0, "mmlu_world_religions": 1.0 }, "n-shot": { "mmlu_abstract_algebra": 5, "mmlu_anatomy": 5, "mmlu_astronomy": 5, "mmlu_business_ethics": 5, "mmlu_clinical_knowledge": 5, "mmlu_college_biology": 5, "mmlu_college_chemistry": 5, "mmlu_college_computer_science": 5, "mmlu_college_mathematics": 5, "mmlu_college_medicine": 5, "mmlu_college_physics": 5, "mmlu_computer_security": 5, "mmlu_conceptual_physics": 5, "mmlu_econometrics": 5, "mmlu_electrical_engineering": 5, "mmlu_elementary_mathematics": 5, "mmlu_formal_logic": 5, "mmlu_global_facts": 5, "mmlu_high_school_biology": 5, "mmlu_high_school_chemistry": 5, "mmlu_high_school_computer_science": 5, "mmlu_high_school_european_history": 5, "mmlu_high_school_geography": 5, "mmlu_high_school_government_and_politics": 5, "mmlu_high_school_macroeconomics": 5, "mmlu_high_school_mathematics": 5, "mmlu_high_school_microeconomics": 5, "mmlu_high_school_physics": 5, "mmlu_high_school_psychology": 5, "mmlu_high_school_statistics": 5, "mmlu_high_school_us_history": 5, "mmlu_high_school_world_history": 5, "mmlu_human_aging": 5, "mmlu_human_sexuality": 5, "mmlu_international_law": 5, "mmlu_jurisprudence": 5, "mmlu_logical_fallacies": 5, 
"mmlu_machine_learning": 5, "mmlu_management": 5, "mmlu_marketing": 5, "mmlu_medical_genetics": 5, "mmlu_miscellaneous": 5, "mmlu_moral_disputes": 5, "mmlu_moral_scenarios": 5, "mmlu_nutrition": 5, "mmlu_philosophy": 5, "mmlu_prehistory": 5, "mmlu_professional_accounting": 5, "mmlu_professional_law": 5, "mmlu_professional_medicine": 5, "mmlu_professional_psychology": 5, "mmlu_public_relations": 5, "mmlu_security_studies": 5, "mmlu_sociology": 5, "mmlu_us_foreign_policy": 5, "mmlu_virology": 5, "mmlu_world_religions": 5 }, "higher_is_better": { "mmlu": { "acc": true }, "mmlu_abstract_algebra": { "acc": true }, "mmlu_anatomy": { "acc": true }, "mmlu_astronomy": { "acc": true }, "mmlu_business_ethics": { "acc": true }, "mmlu_clinical_knowledge": { "acc": true }, "mmlu_college_biology": { "acc": true }, "mmlu_college_chemistry": { "acc": true }, "mmlu_college_computer_science": { "acc": true }, "mmlu_college_mathematics": { "acc": true }, "mmlu_college_medicine": { "acc": true }, "mmlu_college_physics": { "acc": true }, "mmlu_computer_security": { "acc": true }, "mmlu_conceptual_physics": { "acc": true }, "mmlu_econometrics": { "acc": true }, "mmlu_electrical_engineering": { "acc": true }, "mmlu_elementary_mathematics": { "acc": true }, "mmlu_formal_logic": { "acc": true }, "mmlu_global_facts": { "acc": true }, "mmlu_high_school_biology": { "acc": true }, "mmlu_high_school_chemistry": { "acc": true }, "mmlu_high_school_computer_science": { "acc": true }, "mmlu_high_school_european_history": { "acc": true }, "mmlu_high_school_geography": { "acc": true }, "mmlu_high_school_government_and_politics": { "acc": true }, "mmlu_high_school_macroeconomics": { "acc": true }, "mmlu_high_school_mathematics": { "acc": true }, "mmlu_high_school_microeconomics": { "acc": true }, "mmlu_high_school_physics": { "acc": true }, "mmlu_high_school_psychology": { "acc": true }, "mmlu_high_school_statistics": { "acc": true }, "mmlu_high_school_us_history": { "acc": true }, "mmlu_high_school_world_history": { "acc": true }, "mmlu_human_aging": { "acc": true }, "mmlu_human_sexuality": { "acc": true }, "mmlu_humanities": { "acc": true }, "mmlu_international_law": { "acc": true }, "mmlu_jurisprudence": { "acc": true }, "mmlu_logical_fallacies": { "acc": true }, "mmlu_machine_learning": { "acc": true }, "mmlu_management": { "acc": true }, "mmlu_marketing": { "acc": true }, "mmlu_medical_genetics": { "acc": true }, "mmlu_miscellaneous": { "acc": true }, "mmlu_moral_disputes": { "acc": true }, "mmlu_moral_scenarios": { "acc": true }, "mmlu_nutrition": { "acc": true }, "mmlu_other": { "acc": true }, "mmlu_philosophy": { "acc": true }, "mmlu_prehistory": { "acc": true }, "mmlu_professional_accounting": { "acc": true }, "mmlu_professional_law": { "acc": true }, "mmlu_professional_medicine": { "acc": true }, "mmlu_professional_psychology": { "acc": true }, "mmlu_public_relations": { "acc": true }, "mmlu_security_studies": { "acc": true }, "mmlu_social_sciences": { "acc": true }, "mmlu_sociology": { "acc": true }, "mmlu_stem": { "acc": true }, "mmlu_us_foreign_policy": { "acc": true }, "mmlu_virology": { "acc": true }, "mmlu_world_religions": { "acc": true } }, "n-samples": { "mmlu_abstract_algebra": { "original": 100, "effective": 100 }, "mmlu_anatomy": { "original": 135, "effective": 135 }, "mmlu_astronomy": { "original": 152, "effective": 152 }, "mmlu_college_biology": { "original": 144, "effective": 144 }, "mmlu_college_chemistry": { "original": 100, "effective": 100 }, "mmlu_college_computer_science": { "original": 100, 
"effective": 100 }, "mmlu_college_mathematics": { "original": 100, "effective": 100 }, "mmlu_college_physics": { "original": 102, "effective": 102 }, "mmlu_computer_security": { "original": 100, "effective": 100 }, "mmlu_conceptual_physics": { "original": 235, "effective": 235 }, "mmlu_electrical_engineering": { "original": 145, "effective": 145 }, "mmlu_elementary_mathematics": { "original": 378, "effective": 378 }, "mmlu_high_school_biology": { "original": 310, "effective": 310 }, "mmlu_high_school_chemistry": { "original": 203, "effective": 203 }, "mmlu_high_school_computer_science": { "original": 100, "effective": 100 }, "mmlu_high_school_mathematics": { "original": 270, "effective": 270 }, "mmlu_high_school_physics": { "original": 151, "effective": 151 }, "mmlu_high_school_statistics": { "original": 216, "effective": 216 }, "mmlu_machine_learning": { "original": 112, "effective": 112 }, "mmlu_business_ethics": { "original": 100, "effective": 100 }, "mmlu_clinical_knowledge": { "original": 265, "effective": 265 }, "mmlu_college_medicine": { "original": 173, "effective": 173 }, "mmlu_global_facts": { "original": 100, "effective": 100 }, "mmlu_human_aging": { "original": 223, "effective": 223 }, "mmlu_management": { "original": 103, "effective": 103 }, "mmlu_marketing": { "original": 234, "effective": 234 }, "mmlu_medical_genetics": { "original": 100, "effective": 100 }, "mmlu_miscellaneous": { "original": 783, "effective": 783 }, "mmlu_nutrition": { "original": 306, "effective": 306 }, "mmlu_professional_accounting": { "original": 282, "effective": 282 }, "mmlu_professional_medicine": { "original": 272, "effective": 272 }, "mmlu_virology": { "original": 166, "effective": 166 }, "mmlu_econometrics": { "original": 114, "effective": 114 }, "mmlu_high_school_geography": { "original": 198, "effective": 198 }, "mmlu_high_school_government_and_politics": { "original": 193, "effective": 193 }, "mmlu_high_school_macroeconomics": { "original": 390, "effective": 390 }, "mmlu_high_school_microeconomics": { "original": 238, "effective": 238 }, "mmlu_high_school_psychology": { "original": 545, "effective": 545 }, "mmlu_human_sexuality": { "original": 131, "effective": 131 }, "mmlu_professional_psychology": { "original": 612, "effective": 612 }, "mmlu_public_relations": { "original": 110, "effective": 110 }, "mmlu_security_studies": { "original": 245, "effective": 245 }, "mmlu_sociology": { "original": 201, "effective": 201 }, "mmlu_us_foreign_policy": { "original": 100, "effective": 100 }, "mmlu_formal_logic": { "original": 126, "effective": 126 }, "mmlu_high_school_european_history": { "original": 165, "effective": 165 }, "mmlu_high_school_us_history": { "original": 204, "effective": 204 }, "mmlu_high_school_world_history": { "original": 237, "effective": 237 }, "mmlu_international_law": { "original": 121, "effective": 121 }, "mmlu_jurisprudence": { "original": 108, "effective": 108 }, "mmlu_logical_fallacies": { "original": 163, "effective": 163 }, "mmlu_moral_disputes": { "original": 346, "effective": 346 }, "mmlu_moral_scenarios": { "original": 895, "effective": 895 }, "mmlu_philosophy": { "original": 311, "effective": 311 }, "mmlu_prehistory": { "original": 324, "effective": 324 }, "mmlu_professional_law": { "original": 1534, "effective": 1534 }, "mmlu_world_religions": { "original": 171, "effective": 171 } }, "config": { "model": "hf", "model_args": { "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "use_auth_token": true, "trust_remote_code": true }, "model_num_parameters": 1547239588, 
"model_dtype": "torch.bfloat16", "model_revision": "main", "model_sha": "8577a2ddd87dc19c99e07cf288bf43b8f157dd30", "batch_size": "auto", "batch_sizes": [ 7 ], "device": "cuda:0", "use_cache": null, "limit": null, "bootstrap_iters": 100000, "gen_kwargs": {}, "random_seed": 0, "numpy_seed": 1234, "torch_seed": 1234, "fewshot_seed": 1234 }, "git_hash": null, "date": 1769177671.225445, "pretty_env_info": "PyTorch version: 2.9.0+cu128\nIs debug build: False\nCUDA used to build PyTorch: 12.8\nROCM used to build PyTorch: N/A\n\nOS: Ubuntu 22.04.5 LTS (x86_64)\nGCC version: (Ubuntu 11.4.0-1ubuntu1~22.04.2) 11.4.0\nClang version: Could not collect\nCMake version: Could not collect\nLibc version: glibc-2.35\n\nPython version: 3.10.12 (main, Jan 8 2026, 06:52:19) [GCC 11.4.0] (64-bit runtime)\nPython platform: Linux-6.6.87.2-microsoft-standard-WSL2-x86_64-with-glibc2.35\nIs CUDA available: True\nCUDA runtime version: Could not collect\nCUDA_MODULE_LOADING set to: \nGPU models and configuration: GPU 0: NVIDIA GeForce RTX 4070\nNvidia driver version: 591.74\ncuDNN version: Could not collect\nIs XPU available: False\nHIP runtime version: N/A\nMIOpen runtime version: N/A\nIs XNNPACK available: True\n\nCPU:\nArchitecture: x86_64\nCPU op-mode(s): 32-bit, 64-bit\nAddress sizes: 48 bits physical, 48 bits virtual\nByte Order: Little Endian\nCPU(s): 12\nOn-line CPU(s) list: 0-11\nVendor ID: AuthenticAMD\nModel name: AMD Ryzen 5 7600 6-Core Processor\nCPU family: 25\nModel: 97\nThread(s) per core: 2\nCore(s) per socket: 6\nSocket(s): 1\nStepping: 2\nBogoMIPS: 7585.57\nFlags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush mmx fxsr sse sse2 ht syscall nx mmxext fxsr_opt pdpe1gb rdtscp lm constant_tsc rep_good nopl tsc_reliable nonstop_tsc cpuid extd_apicid tsc_known_freq pni pclmulqdq ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm cmp_legacy svm cr8_legacy abm sse4a misalignsse 3dnowprefetch osvw topoext perfctr_core ssbd ibrs ibpb stibp vmmcall fsgsbase bmi1 avx2 smep bmi2 erms invpcid avx512f avx512dq rdseed adx smap avx512ifma clflushopt clwb avx512cd sha_ni avx512bw avx512vl xsaveopt xsavec xgetbv1 xsaves avx512_bf16 clzero xsaveerptr arat npt nrip_save tsc_scale vmcb_clean flushbyasid decodeassists pausefilter pfthreshold v_vmsave_vmload avx512vbmi umip avx512_vbmi2 gfni vaes vpclmulqdq avx512_vnni avx512_bitalg avx512_vpopcntdq rdpid fsrm\nVirtualization: AMD-V\nHypervisor vendor: Microsoft\nVirtualization type: full\nL1d cache: 192 KiB (6 instances)\nL1i cache: 192 KiB (6 instances)\nL2 cache: 6 MiB (6 instances)\nL3 cache: 32 MiB (1 instance)\nNUMA node(s): 1\nNUMA node0 CPU(s): 0-11\nVulnerability Gather data sampling: Not affected\nVulnerability Itlb multihit: Not affected\nVulnerability L1tf: Not affected\nVulnerability Mds: Not affected\nVulnerability Meltdown: Not affected\nVulnerability Mmio stale data: Not affected\nVulnerability Reg file data sampling: Not affected\nVulnerability Retbleed: Not affected\nVulnerability Spec rstack overflow: Vulnerable: Safe RET, no microcode\nVulnerability Spec store bypass: Mitigation; Speculative Store Bypass disabled via prctl\nVulnerability Spectre v1: Mitigation; usercopy/swapgs barriers and __user pointer sanitization\nVulnerability Spectre v2: Mitigation; Retpolines; IBPB conditional; IBRS_FW; STIBP always-on; RSB filling; PBRSB-eIBRS Not affected; BHI Not affected\nVulnerability Srbds: Not affected\nVulnerability Tsx async abort: Not affected\n\nVersions of relevant libraries:\n[pip3] 
numpy==2.2.6\n[pip3] nvidia-cublas-cu12==12.8.4.1\n[pip3] nvidia-cuda-cupti-cu12==12.8.90\n[pip3] nvidia-cuda-nvrtc-cu12==12.8.93\n[pip3] nvidia-cuda-runtime-cu12==12.8.90\n[pip3] nvidia-cudnn-cu12==9.10.2.21\n[pip3] nvidia-cudnn-frontend==1.17.0\n[pip3] nvidia-cufft-cu12==11.3.3.83\n[pip3] nvidia-curand-cu12==10.3.9.90\n[pip3] nvidia-cusolver-cu12==11.7.3.90\n[pip3] nvidia-cusparse-cu12==12.5.8.93\n[pip3] nvidia-cusparselt-cu12==0.7.1\n[pip3] nvidia-nccl-cu12==2.27.5\n[pip3] nvidia-nvjitlink-cu12==12.8.93\n[pip3] nvidia-nvtx-cu12==12.8.90\n[pip3] torch==2.9.0\n[pip3] torchaudio==2.9.0\n[pip3] torchvision==0.24.0\n[pip3] triton==3.5.0\n[conda] Could not collect", "transformers_version": "4.57.6", "lm_eval_version": "0.4.10.dev0", "upper_git_hash": null, "tokenizer_pad_token": [ "[PAD]", "0" ], "tokenizer_eos_token": [ "[|endofturn|]", "361" ], "tokenizer_bos_token": [ "[BOS]", "1" ], "eot_token_id": 361, "max_length": 65536, "task_hashes": {}, "model_source": "hf", "model_name": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16", "model_name_sanitized": "MangoLab__EXAONE-4.0-1.2B-GPTQ-W8A16", "system_instruction": null, "system_instruction_sha": null, "fewshot_as_multiturn": null, "chat_template": null, "chat_template_sha": null, "total_evaluation_time_seconds": "4252.945384232" }
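
For reference, the "config" block above records everything needed to rerun this evaluation. A minimal sketch through the lm-evaluation-harness Python API follows; simple_evaluate and the seed keyword arguments shown here exist in lm_eval 0.4.x, but exact parameter names can shift between versions, so treat this as an approximation of the run rather than the exact invocation that produced this file.

# Sketch only: re-creates the run described in the "config" block above,
# assuming lm_eval >= 0.4 (simple_evaluate and the seed kwargs shown here).
import lm_eval

results = lm_eval.simple_evaluate(
    model="hf",
    model_args={
        "pretrained": "MangoLab/EXAONE-4.0-1.2B-GPTQ-W8A16",
        "trust_remote_code": True,
    },
    tasks=["mmlu"],                # expands to all 57 MMLU subtasks
    num_fewshot=5,                 # matches the "n-shot" block above
    batch_size="auto",
    device="cuda:0",
    random_seed=0,                 # seeds copied from the "config" block
    numpy_random_seed=1234,
    torch_random_seed=1234,
    fewshot_random_seed=1234,
)
print(results["results"]["mmlu"])  # aggregate accuracy, as in this file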
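
A results file in this schema can also be summarized with a few lines of Python. The "acc,none" / "acc_stderr,none" keys below match the ones used throughout this document; the filename is a placeholder, and the file is assumed to be valid JSON.

import json

# Load the results JSON (path is a placeholder) and print each task's
# accuracy with its standard error, best-scoring task first.
with open("results.json") as f:
    data = json.load(f)

rows = [
    (task, m["acc,none"], m.get("acc_stderr,none", 0.0))
    for task, m in data["results"].items()
    if "acc,none" in m
]
for task, acc, err in sorted(rows, key=lambda r: r[1], reverse=True):
    print(f"{task:45s} {acc:.4f} +/- {err:.4f}")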