| task_id | name | suite | hf_repo | hf_subset | file_path | line_number | variable_name | is_subtask | main_task | subtask_count | suites | subtasks | is_standalone |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| leaderboard:mmlu:logical_fallacies | mmlu:logical_fallacies | leaderboard | lighteval/mmlu | logical_fallacies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,298 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:logical_fallacies"] | false |
| helm:mmlu:logical_fallacies | mmlu:logical_fallacies | helm | lighteval/mmlu | logical_fallacies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,313 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:logical_fallacies"] | false |
| original:mmlu:machine_learning | mmlu:machine_learning | original | cais/mmlu | machine_learning | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,339 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:machine_learning"] | false |
| leaderboard:mmlu:machine_learning | mmlu:machine_learning | leaderboard | lighteval/mmlu | machine_learning | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,354 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:machine_learning"] | false |
| helm:mmlu:machine_learning | mmlu:machine_learning | helm | lighteval/mmlu | machine_learning | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,369 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:machine_learning"] | false |
| original:mmlu:management | mmlu:management | original | cais/mmlu | management | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,395 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:management"] | false |
| leaderboard:mmlu:management | mmlu:management | leaderboard | lighteval/mmlu | management | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,410 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:management"] | false |
| helm:mmlu:management | mmlu:management | helm | lighteval/mmlu | management | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,425 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:management"] | false |
| original:mmlu:marketing | mmlu:marketing | original | cais/mmlu | marketing | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,451 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:marketing"] | false |
| leaderboard:mmlu:marketing | mmlu:marketing | leaderboard | lighteval/mmlu | marketing | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,466 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:marketing"] | false |
| helm:mmlu:marketing | mmlu:marketing | helm | lighteval/mmlu | marketing | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,481 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:marketing"] | false |
| original:mmlu:medical_genetics | mmlu:medical_genetics | original | cais/mmlu | medical_genetics | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,507 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:medical_genetics"] | false |
| leaderboard:mmlu:medical_genetics | mmlu:medical_genetics | leaderboard | lighteval/mmlu | medical_genetics | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,522 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:medical_genetics"] | false |
| helm:mmlu:medical_genetics | mmlu:medical_genetics | helm | lighteval/mmlu | medical_genetics | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,537 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:medical_genetics"] | false |
| original:mmlu:miscellaneous | mmlu:miscellaneous | original | cais/mmlu | miscellaneous | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,563 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:miscellaneous"] | false |
| leaderboard:mmlu:miscellaneous | mmlu:miscellaneous | leaderboard | lighteval/mmlu | miscellaneous | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,578 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:miscellaneous"] | false |
| helm:mmlu:miscellaneous | mmlu:miscellaneous | helm | lighteval/mmlu | miscellaneous | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,593 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:miscellaneous"] | false |
| original:mmlu:moral_disputes | mmlu:moral_disputes | original | cais/mmlu | moral_disputes | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,619 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:moral_disputes"] | false |
| leaderboard:mmlu:moral_disputes | mmlu:moral_disputes | leaderboard | lighteval/mmlu | moral_disputes | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,634 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:moral_disputes"] | false |
| helm:mmlu:moral_disputes | mmlu:moral_disputes | helm | lighteval/mmlu | moral_disputes | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,649 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:moral_disputes"] | false |
| original:mmlu:moral_scenarios | mmlu:moral_scenarios | original | cais/mmlu | moral_scenarios | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,675 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:moral_scenarios"] | false |
| leaderboard:mmlu:moral_scenarios | mmlu:moral_scenarios | leaderboard | lighteval/mmlu | moral_scenarios | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,690 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:moral_scenarios"] | false |
| helm:mmlu:moral_scenarios | mmlu:moral_scenarios | helm | lighteval/mmlu | moral_scenarios | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,705 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:moral_scenarios"] | false |
| original:mmlu:nutrition | mmlu:nutrition | original | cais/mmlu | nutrition | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,731 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:nutrition"] | false |
| leaderboard:mmlu:nutrition | mmlu:nutrition | leaderboard | lighteval/mmlu | nutrition | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,746 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:nutrition"] | false |
| helm:mmlu:nutrition | mmlu:nutrition | helm | lighteval/mmlu | nutrition | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,761 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:nutrition"] | false |
| original:mmlu:philosophy | mmlu:philosophy | original | cais/mmlu | philosophy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,787 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:philosophy"] | false |
| leaderboard:mmlu:philosophy | mmlu:philosophy | leaderboard | lighteval/mmlu | philosophy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,802 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:philosophy"] | false |
| helm:mmlu:philosophy | mmlu:philosophy | helm | lighteval/mmlu | philosophy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,817 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:philosophy"] | false |
| original:mmlu:prehistory | mmlu:prehistory | original | cais/mmlu | prehistory | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,843 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:prehistory"] | false |
| leaderboard:mmlu:prehistory | mmlu:prehistory | leaderboard | lighteval/mmlu | prehistory | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,858 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:prehistory"] | false |
| helm:mmlu:prehistory | mmlu:prehistory | helm | lighteval/mmlu | prehistory | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,873 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:prehistory"] | false |
| original:mmlu:professional_accounting | mmlu:professional_accounting | original | cais/mmlu | professional_accounting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,899 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:professional_accounting"] | false |
| leaderboard:mmlu:professional_accounting | mmlu:professional_accounting | leaderboard | lighteval/mmlu | professional_accounting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,914 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:professional_accounting"] | false |
| helm:mmlu:professional_accounting | mmlu:professional_accounting | helm | lighteval/mmlu | professional_accounting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,929 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:professional_accounting"] | false |
| original:mmlu:professional_law | mmlu:professional_law | original | cais/mmlu | professional_law | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,955 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:professional_law"] | false |
| leaderboard:mmlu:professional_law | mmlu:professional_law | leaderboard | lighteval/mmlu | professional_law | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,970 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:professional_law"] | false |
| helm:mmlu:professional_law | mmlu:professional_law | helm | lighteval/mmlu | professional_law | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 13,985 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:professional_law"] | false |
| original:mmlu:professional_medicine | mmlu:professional_medicine | original | cais/mmlu | professional_medicine | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,011 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:professional_medicine"] | false |
| leaderboard:mmlu:professional_medicine | mmlu:professional_medicine | leaderboard | lighteval/mmlu | professional_medicine | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,026 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:professional_medicine"] | false |
| helm:mmlu:professional_medicine | mmlu:professional_medicine | helm | lighteval/mmlu | professional_medicine | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,041 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:professional_medicine"] | false |
| original:mmlu:professional_psychology | mmlu:professional_psychology | original | cais/mmlu | professional_psychology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,067 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:professional_psychology"] | false |
| leaderboard:mmlu:professional_psychology | mmlu:professional_psychology | leaderboard | lighteval/mmlu | professional_psychology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,082 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:professional_psychology"] | false |
| helm:mmlu:professional_psychology | mmlu:professional_psychology | helm | lighteval/mmlu | professional_psychology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,097 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:professional_psychology"] | false |
| original:mmlu:public_relations | mmlu:public_relations | original | cais/mmlu | public_relations | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,123 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:public_relations"] | false |
| leaderboard:mmlu:public_relations | mmlu:public_relations | leaderboard | lighteval/mmlu | public_relations | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,138 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:public_relations"] | false |
| helm:mmlu:public_relations | mmlu:public_relations | helm | lighteval/mmlu | public_relations | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,153 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:public_relations"] | false |
| original:mmlu:security_studies | mmlu:security_studies | original | cais/mmlu | security_studies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,179 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:security_studies"] | false |
| leaderboard:mmlu:security_studies | mmlu:security_studies | leaderboard | lighteval/mmlu | security_studies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,194 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:security_studies"] | false |
| helm:mmlu:security_studies | mmlu:security_studies | helm | lighteval/mmlu | security_studies | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,209 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:security_studies"] | false |
| original:mmlu:sociology | mmlu:sociology | original | cais/mmlu | sociology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,235 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:sociology"] | false |
| leaderboard:mmlu:sociology | mmlu:sociology | leaderboard | lighteval/mmlu | sociology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,250 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:sociology"] | false |
| helm:mmlu:sociology | mmlu:sociology | helm | lighteval/mmlu | sociology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,265 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:sociology"] | false |
| original:mmlu:us_foreign_policy | mmlu:us_foreign_policy | original | cais/mmlu | us_foreign_policy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,291 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:us_foreign_policy"] | false |
| leaderboard:mmlu:us_foreign_policy | mmlu:us_foreign_policy | leaderboard | lighteval/mmlu | us_foreign_policy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,306 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:us_foreign_policy"] | false |
| helm:mmlu:us_foreign_policy | mmlu:us_foreign_policy | helm | lighteval/mmlu | us_foreign_policy | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,321 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:us_foreign_policy"] | false |
| original:mmlu:virology | mmlu:virology | original | cais/mmlu | virology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,347 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:virology"] | false |
| leaderboard:mmlu:virology | mmlu:virology | leaderboard | lighteval/mmlu | virology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,362 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:virology"] | false |
| helm:mmlu:virology | mmlu:virology | helm | lighteval/mmlu | virology | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,377 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:virology"] | false |
| original:mmlu:world_religions | mmlu:world_religions | original | cais/mmlu | world_religions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,403 | direct_call | true | mmlu | 1 | ["original"] | ["original:mmlu:world_religions"] | false |
| leaderboard:mmlu:world_religions | mmlu:world_religions | leaderboard | lighteval/mmlu | world_religions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,418 | direct_call | true | mmlu | 1 | ["leaderboard"] | ["leaderboard:mmlu:world_religions"] | false |
| helm:mmlu:world_religions | mmlu:world_religions | helm | lighteval/mmlu | world_religions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,433 | direct_call | true | mmlu | 1 | ["helm"] | ["helm:mmlu:world_religions"] | false |
| lighteval:bigbench:mnist_ascii | bigbench:mnist_ascii | lighteval | tasksource/bigbench | mnist_ascii | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,459 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:mnist_ascii"] | false |
| lighteval:bigbench:modified_arithmetic | bigbench:modified_arithmetic | lighteval | tasksource/bigbench | modified_arithmetic | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,474 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:modified_arithmetic"] | false |
| lighteval:bigbench:moral_permissibility | bigbench:moral_permissibility | lighteval | tasksource/bigbench | moral_permissibility | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,489 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:moral_permissibility"] | false |
| lighteval:bigbench:movie_dialog_same_or_different | bigbench:movie_dialog_same_or_different | lighteval | tasksource/bigbench | movie_dialog_same_or_different | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,504 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:movie_dialog_same_or_different"] | false |
| lighteval:mtnt2019:en-fr | mtnt2019:en-fr | lighteval | lighteval/sacrebleu_manual | mtnt2019_en-fr | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,534 | direct_call | true | mtnt2019 | 1 | ["lighteval"] | ["lighteval:mtnt2019:en-fr"] | false |
| lighteval:mtnt2019:en-ja | mtnt2019:en-ja | lighteval | lighteval/sacrebleu_manual | mtnt2019_en-ja | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,549 | direct_call | true | mtnt2019 | 1 | ["lighteval"] | ["lighteval:mtnt2019:en-ja"] | false |
| lighteval:mtnt2019:fr-en | mtnt2019:fr-en | lighteval | lighteval/sacrebleu_manual | mtnt2019_fr-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,564 | direct_call | true | mtnt2019 | 1 | ["lighteval"] | ["lighteval:mtnt2019:fr-en"] | false |
| lighteval:mtnt2019:ja-en | mtnt2019:ja-en | lighteval | lighteval/sacrebleu_manual | mtnt2019_ja-en | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,579 | direct_call | true | mtnt2019 | 1 | ["lighteval"] | ["lighteval:mtnt2019:ja-en"] | false |
| lighteval:bigbench:mult_data_wrangling | bigbench:mult_data_wrangling | lighteval | tasksource/bigbench | mult_data_wrangling | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,594 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:mult_data_wrangling"] | false |
| lighteval:bigbench:multiemo | bigbench:multiemo | lighteval | tasksource/bigbench | multiemo | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,609 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:multiemo"] | false |
| lighteval:musr:murder_mysteries | musr:murder_mysteries | lighteval | TAUR-Lab/MuSR | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,624 | direct_call | true | musr | 1 | ["lighteval"] | ["lighteval:musr:murder_mysteries"] | false |
| lighteval:musr:object_placements | musr:object_placements | lighteval | TAUR-Lab/MuSR | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,639 | direct_call | true | musr | 1 | ["lighteval"] | ["lighteval:musr:object_placements"] | false |
| lighteval:musr:team_allocation | musr:team_allocation | lighteval | TAUR-Lab/MuSR | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,654 | direct_call | true | musr | 1 | ["lighteval"] | ["lighteval:musr:team_allocation"] | false |
| lighteval:mutual | mutual | lighteval | lighteval/mutual_harness | mutual | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,669 | direct_call | false | mutual | 1 | ["lighteval"] | ["lighteval:mutual"] | true |
| lighteval:mutual_plus | mutual_plus | lighteval | lighteval/mutual_harness | mutual_plus | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,684 | direct_call | false | mutual_plus | 1 | ["lighteval"] | ["lighteval:mutual_plus"] | true |
| helm:narrativeqa | narrativeqa | helm | lighteval/narrative_qa_helm | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,699 | direct_call | false | narrativeqa | 1 | ["helm"] | ["helm:narrativeqa"] | true |
| lighteval:bigbench:natural_instructions | bigbench:natural_instructions | lighteval | tasksource/bigbench | natural_instructions | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,721 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:natural_instructions"] | false |
| lighteval:natural_questions | natural_questions | lighteval | lighteval/small_natural_questions | default | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,736 | direct_call | false | natural_questions | 1 | ["lighteval"] | ["lighteval:natural_questions"] | true |
| lighteval:bigbench:nonsense_words_grammar | bigbench:nonsense_words_grammar | lighteval | tasksource/bigbench | nonsense_words_grammar | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,775 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:nonsense_words_grammar"] | false |
| lighteval:bigbench_lite:novel_concepts | bigbench_lite:novel_concepts | lighteval | tasksource/bigbench | novel_concepts | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,790 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:novel_concepts"] | false |
| helm:numeracy:linear_example | numeracy:linear_example | helm | lighteval/numeracy | linear_example | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,805 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:linear_example"] | false |
| helm:numeracy:linear_standard | numeracy:linear_standard | helm | lighteval/numeracy | linear_standard | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,823 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:linear_standard"] | false |
| helm:numeracy:parabola_example | numeracy:parabola_example | helm | lighteval/numeracy | parabola_example | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,841 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:parabola_example"] | false |
| helm:numeracy:parabola_standard | numeracy:parabola_standard | helm | lighteval/numeracy | parabola_standard | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,859 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:parabola_standard"] | false |
| helm:numeracy:paraboloid_example | numeracy:paraboloid_example | helm | lighteval/numeracy | paraboloid_example | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,877 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:paraboloid_example"] | false |
| helm:numeracy:paraboloid_standard | numeracy:paraboloid_standard | helm | lighteval/numeracy | paraboloid_standard | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,895 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:paraboloid_standard"] | false |
| helm:numeracy:plane_example | numeracy:plane_example | helm | lighteval/numeracy | plane_example | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,913 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:plane_example"] | false |
| helm:numeracy:plane_standard | numeracy:plane_standard | helm | lighteval/numeracy | plane_standard | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,931 | direct_call | true | numeracy | 1 | ["helm"] | ["helm:numeracy:plane_standard"] | false |
| lighteval:bigbench:object_counting | bigbench:object_counting | lighteval | tasksource/bigbench | object_counting | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,949 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:object_counting"] | false |
| lighteval:bigbench:odd_one_out | bigbench:odd_one_out | lighteval | tasksource/bigbench | odd_one_out | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,964 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:odd_one_out"] | false |
| helm:openbookqa | openbookqa | helm | openbookqa | main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 14,979 | direct_call | false | openbookqa | 1 | ["helm"] | ["helm:openbookqa"] | true |
| lighteval:openbookqa | openbookqa | lighteval | openbookqa | main | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,005 | direct_call | false | openbookqa | 1 | ["lighteval"] | ["lighteval:openbookqa"] | true |
| lighteval:bigbench_lite:operators | bigbench_lite:operators | lighteval | tasksource/bigbench | operators | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,023 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:operators"] | false |
| lighteval:bigbench:paragraph_segmentation | bigbench:paragraph_segmentation | lighteval | tasksource/bigbench | paragraph_segmentation | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,038 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:paragraph_segmentation"] | false |
| lighteval:bigbench:parsinlu_qa | bigbench:parsinlu_qa | lighteval | tasksource/bigbench | parsinlu_qa | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,053 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:parsinlu_qa"] | false |
| lighteval:bigbench_lite:parsinlu_reading_comprehension | bigbench_lite:parsinlu_reading_comprehension | lighteval | tasksource/bigbench | parsinlu_reading_comprehension | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,068 | direct_call | true | bigbench_lite | 1 | ["lighteval"] | ["lighteval:bigbench_lite:parsinlu_reading_comprehension"] | false |
| lighteval:bigbench:penguins_in_a_table | bigbench:penguins_in_a_table | lighteval | tasksource/bigbench | penguins_in_a_table | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,083 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:penguins_in_a_table"] | false |
| lighteval:bigbench:periodic_elements | bigbench:periodic_elements | lighteval | tasksource/bigbench | periodic_elements | /Users/nathan/Repos/lighteval/src/lighteval/tasks/default_tasks.py | 15,098 | direct_call | true | bigbench | 1 | ["lighteval"] | ["lighteval:bigbench:periodic_elements"] | false |
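Each row above registers a single lighteval task. The `task_id` column follows the pattern `suite:main_task:hf_subset` for subtask rows (`is_subtask = true`) and `suite:task` for standalone rows (`is_standalone = true`). As a minimal illustrative sketch (not code from the lighteval repository; the helper name `parse_task_id` is hypothetical), these identifiers can be decomposed and grouped by suite like this:

```python
from collections import defaultdict

def parse_task_id(task_id: str) -> dict:
    """Split an id like 'leaderboard:mmlu:management' or 'lighteval:mutual'
    into suite / main_task / subset parts (hypothetical helper)."""
    parts = task_id.split(":")
    if len(parts) == 3:  # subtask rows: suite:main_task:hf_subset
        return {"suite": parts[0], "main_task": parts[1], "subset": parts[2], "is_subtask": True}
    # standalone rows such as 'lighteval:mutual' have no subset component
    return {"suite": parts[0], "main_task": parts[1], "subset": None, "is_subtask": False}

# A few task_ids taken verbatim from the table above.
task_ids = [
    "leaderboard:mmlu:management",
    "helm:mmlu:management",
    "lighteval:musr:murder_mysteries",
    "lighteval:mutual",
]

by_suite = defaultdict(list)
for tid in task_ids:
    info = parse_task_id(tid)
    by_suite[info["suite"]].append(info["main_task"])

print(dict(by_suite))
# {'leaderboard': ['mmlu'], 'helm': ['mmlu'], 'lighteval': ['musr', 'mutual']}
```

The branch on `len(parts)` mirrors the table: every three-part id here carries `is_subtask = true`, while the two-part ids (`lighteval:mutual`, `helm:narrativeqa`, `helm:openbookqa`, ...) carry `is_standalone = true`.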
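If this registry is published as a dataset on the Hugging Face Hub, the rows can also be queried with the `datasets` library. The repository id below is a hypothetical placeholder, since the actual id is not visible on this page:

```python
from datasets import load_dataset

# "your-org/lighteval-task-registry" is a hypothetical placeholder repo id;
# substitute the dataset repository this table actually lives in.
registry = load_dataset("your-org/lighteval-task-registry", split="train")

# Keep only the helm-suite rows, mirroring the `suite` column above.
helm_tasks = registry.filter(lambda row: row["suite"] == "helm")
print(helm_tasks["task_id"][:5])
```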