id stringlengths 2 115 | lastModified stringlengths 24 24 | tags list | author stringlengths 2 42 ⌀ | description stringlengths 0 68.7k ⌀ | citation stringlengths 0 10.7k ⌀ | cardData null | likes int64 0 3.55k | downloads int64 0 10.1M | card stringlengths 0 1.01M |
|---|---|---|---|---|---|---|---|---|---|
osangni/nshin | 2023-10-07T15:20:19.000Z | [
"region:us"
] | osangni | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-16b6f6-60513145412 | 2023-10-04T16:35:30.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-dd0575-60719145416 | 2023-10-04T16:35:37.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-yahoo_answers_topics-yahoo_answers_topics-73f547-60759145417 | 2023-10-04T16:36:09.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-8c71ad-60665145415 | 2023-10-04T16:36:18.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
Nadya78/Nadya78 | 2023-10-04T16:36:40.000Z | [
"region:us"
] | Nadya78 | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-yahoo_answers_topics-yahoo_answers_topics-73f547-60759145418 | 2023-10-04T16:36:49.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-yahoo_answers_topics-yahoo_answers_topics-73f547-60759145419 | 2023-10-04T16:36:56.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ade_corpus_v2-Ade_corpus_v2_classification-0fb490-60801145420 | 2023-10-04T16:37:05.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-8e8689-60916145421 | 2023-10-04T16:37:33.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-e9dad8-60917145422 | 2023-10-04T16:37:41.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-815de8-60922145423 | 2023-10-04T16:37:50.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-aa0861-60523145413 | 2023-10-04T16:38:11.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-a4664c-60940145424 | 2023-10-04T16:38:19.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-glue-cola-fef953-60983145425 | 2023-10-04T16:38:29.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-6e26aa-61067145426 | 2023-10-04T16:38:48.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-cuad-default-a7d56e-61077145427 | 2023-10-04T16:38:55.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-b27f6f-61091145428 | 2023-10-04T16:39:00.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-glue-cola-42d3ac-61171145429 | 2023-10-04T16:39:12.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-521464-61179145430 | 2023-10-04T16:39:26.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-5301fb-61331145431 | 2023-10-04T16:39:38.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-6cb0d8-61466145432 | 2023-10-04T16:40:03.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-math_dataset-algebra__linear_1d-400643-61492145434 | 2023-10-04T18:04:21.000Z | [
"autotrain",
"evaluation",
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- math_dataset
eval_info:
task: summarization
model: umarkhalid96/t5-small-train
metrics: ['accuracy', 'cer']
dataset_name: math_dataset
dataset_config: algebra__linear_1d
dataset_split: train
col_mapping:
text: question
target: answer
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Summarization
* Model: umarkhalid96/t5-small-train
* Dataset: math_dataset
* Config: algebra__linear_1d
* Split: train
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@Jesuscarrasco@live.com](https://huggingface.co/Jesuscarrasco@live.com) for evaluating this model. |
autoevaluate/autoeval-eval-poem_sentiment-default-a84867-61574145436 | 2023-10-04T16:40:42.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-adversarial_qa-dbert-5b493d-61553145435 | 2023-10-04T16:42:23.000Z | [
"autotrain",
"evaluation",
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- adversarial_qa
eval_info:
task: summarization
model: Alred/bart-base-finetuned-summarization-cnn-ver1.1
metrics: ['google_bleu']
dataset_name: adversarial_qa
dataset_config: dbert
dataset_split: validation
col_mapping:
text: question
target: context
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Summarization
* Model: Alred/bart-base-finetuned-summarization-cnn-ver1.1
* Dataset: adversarial_qa
* Config: dbert
* Split: validation
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@kritirwa](https://huggingface.co/kritirwa) for evaluating this model. |
HuggingFaceBR4/thomwolf-small-llama | 2023-10-04T20:57:15.000Z | [
"region:us"
] | HuggingFaceBR4 | null | null | null | 0 | 0 | ---
pretty_name: Evaluation run of 3050
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [3050](https://huggingface.co/3050) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 406 configuration, each one coresponding to one of\
\ the evaluated task.\n\nThe dataset has been created from 116 run(s). Each run\
\ can be found as a specific split in each configuration, the split being named\
\ using the timestamp of the run.The \"train\" split is always pointing to the latest\
\ results.\n\nAn additional configuration \"results\" store all the aggregated results\
\ of the run (and is used to compute and display the agregated metrics on the [Open\
\ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"HuggingFaceBR4/thomwolf-small-llama\"\
,\n\t\"harness_winogrande_0_small_llama_1p82G_the_pile_eval_3050_parquet\",\n\t\
split=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from\
\ run ](https://huggingface.co/datasets/HuggingFaceBR4/thomwolf-small-llama/blob/main/0/results_small-llama-1p82G-the-pile_eval.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3484644210447005,\n\
\ \"acc_stderr\": 0.011027112856079687,\n \"acc_norm\": 0.31556595066284593,\n\
\ \"acc_norm_stderr\": 0.0116539643464742\n },\n \"harness|arc:challenge|0\"\
: {\n \"acc\": 0.2167235494880546,\n \"acc_stderr\": 0.012040156713481189,\n\
\ \"acc_norm\": 0.2645051194539249,\n \"acc_norm_stderr\": 0.01288927294931337\n\
\ },\n \"harness|arc:easy|0\": {\n \"acc\": 0.25126262626262624,\n\
\ \"acc_stderr\": 0.008900141191221627,\n \"acc_norm\": 0.24621212121212122,\n\
\ \"acc_norm_stderr\": 0.008839902656771871\n },\n \"harness|hellaswag|0\"\
: {\n \"acc\": 0.25343557060346544,\n \"acc_stderr\": 0.0043408916733205065,\n\
\ \"acc_norm\": 0.25731925911173076,\n \"acc_norm_stderr\": 0.00436263363737448\n\
\ },\n \"harness|openbookqa|0\": {\n \"acc\": 0.188,\n \"acc_stderr\"\
: 0.017490678880346222,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\"\
: 0.02051442622562805\n },\n \"harness|piqa|0\": {\n \"acc\": 0.5380848748639826,\n\
\ \"acc_stderr\": 0.011631933367846709,\n \"acc_norm\": 0.5097932535364527,\n\
\ \"acc_norm_stderr\": 0.011663586263283223\n },\n \"harness|super_glue:boolq|0\"\
: {\n \"acc\": 0.4795107033639144,\n \"acc_stderr\": 0.008737709345935943\n\
\ },\n \"harness|winogrande|0\": {\n \"acc\": 0.5122336227308603,\n\
\ \"acc_stderr\": 0.014048278820405621\n }\n}\n```"
repo_url: https://huggingface.co/3050
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_arc_challenge_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|arc:challenge|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_arc_easy_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|arc:easy|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_hellaswag_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|hellaswag|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_openbookqa_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|openbookqa|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_piqa_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|piqa|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_super_glue_boolq_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|super_glue:boolq|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1000
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1000.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval100
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval100.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1050
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1050.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1100
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1100.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1150
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1150.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1200
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1200.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1250
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1250.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1300
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1300.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1350
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1350.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1400
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1400.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1450
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1450.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1500
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1500.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval150
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval150.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval1550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval1550
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval1550.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval200
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval200.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval250
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval250.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval300
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval300.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval350
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval350.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval400
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval400.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval450
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval450.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval500
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval500.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval50_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval50
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval50.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval550_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval550
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval550.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval600
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval600.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval650
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval650.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval700
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval700.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval750
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval750.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval800
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval800.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval850
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval850.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval900
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval900.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval950
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval950.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1600_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1600
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1600.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1650_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1650
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1650.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1700_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1700
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1700.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1750
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1750.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1800
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1800.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1850
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1850.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1900
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1900.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_1950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_1950
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_1950.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2000
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2000.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2050
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2050.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2100_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2100
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2100.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2150_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2150
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2150.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2200_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2200
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2200.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2250_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2250
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2250.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2300_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2300
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2300.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2350_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2350
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2350.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2400_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2400
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2400.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2450_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2450
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2450.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2500_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2500
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2500.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2750_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2750
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2750.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2800_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2800
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2800.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2850_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2850
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2850.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2900_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2900
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2900.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_2950_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_2950
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_2950.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_3000_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3000
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_3000.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_3050_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval_3050
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval_3050.parquet'
- config_name: harness_winogrande_0_small_llama_1p82G_the_pile_eval_parquet
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- '**/details_harness|winogrande|0_small-llama-1p82G-the-pile_eval.parquet'
- config_name: results
data_files:
- split: small_llama_1p82G_the_pile_eval
path:
- results_small-llama-1p82G-the-pile_eval.parquet
- split: small_llama_1p82G_the_pile_eval100
path:
- results_small-llama-1p82G-the-pile_eval100.parquet
- split: small_llama_1p82G_the_pile_eval1000
path:
- results_small-llama-1p82G-the-pile_eval1000.parquet
- split: small_llama_1p82G_the_pile_eval1050
path:
- results_small-llama-1p82G-the-pile_eval1050.parquet
- split: small_llama_1p82G_the_pile_eval1100
path:
- results_small-llama-1p82G-the-pile_eval1100.parquet
- split: small_llama_1p82G_the_pile_eval1150
path:
- results_small-llama-1p82G-the-pile_eval1150.parquet
- split: small_llama_1p82G_the_pile_eval1200
path:
- results_small-llama-1p82G-the-pile_eval1200.parquet
- split: small_llama_1p82G_the_pile_eval1250
path:
- results_small-llama-1p82G-the-pile_eval1250.parquet
- split: small_llama_1p82G_the_pile_eval1300
path:
- results_small-llama-1p82G-the-pile_eval1300.parquet
- split: small_llama_1p82G_the_pile_eval1350
path:
- results_small-llama-1p82G-the-pile_eval1350.parquet
- split: small_llama_1p82G_the_pile_eval1400
path:
- results_small-llama-1p82G-the-pile_eval1400.parquet
- split: small_llama_1p82G_the_pile_eval1450
path:
- results_small-llama-1p82G-the-pile_eval1450.parquet
- split: small_llama_1p82G_the_pile_eval150
path:
- results_small-llama-1p82G-the-pile_eval150.parquet
- split: small_llama_1p82G_the_pile_eval1500
path:
- results_small-llama-1p82G-the-pile_eval1500.parquet
- split: small_llama_1p82G_the_pile_eval1550
path:
- results_small-llama-1p82G-the-pile_eval1550.parquet
- split: small_llama_1p82G_the_pile_eval200
path:
- results_small-llama-1p82G-the-pile_eval200.parquet
- split: small_llama_1p82G_the_pile_eval250
path:
- results_small-llama-1p82G-the-pile_eval250.parquet
- split: small_llama_1p82G_the_pile_eval300
path:
- results_small-llama-1p82G-the-pile_eval300.parquet
- split: small_llama_1p82G_the_pile_eval350
path:
- results_small-llama-1p82G-the-pile_eval350.parquet
- split: small_llama_1p82G_the_pile_eval400
path:
- results_small-llama-1p82G-the-pile_eval400.parquet
- split: small_llama_1p82G_the_pile_eval450
path:
- results_small-llama-1p82G-the-pile_eval450.parquet
- split: small_llama_1p82G_the_pile_eval50
path:
- results_small-llama-1p82G-the-pile_eval50.parquet
- split: small_llama_1p82G_the_pile_eval500
path:
- results_small-llama-1p82G-the-pile_eval500.parquet
- split: small_llama_1p82G_the_pile_eval550
path:
- results_small-llama-1p82G-the-pile_eval550.parquet
- split: small_llama_1p82G_the_pile_eval600
path:
- results_small-llama-1p82G-the-pile_eval600.parquet
- split: small_llama_1p82G_the_pile_eval650
path:
- results_small-llama-1p82G-the-pile_eval650.parquet
- split: small_llama_1p82G_the_pile_eval700
path:
- results_small-llama-1p82G-the-pile_eval700.parquet
- split: small_llama_1p82G_the_pile_eval750
path:
- results_small-llama-1p82G-the-pile_eval750.parquet
- split: small_llama_1p82G_the_pile_eval800
path:
- results_small-llama-1p82G-the-pile_eval800.parquet
- split: small_llama_1p82G_the_pile_eval850
path:
- results_small-llama-1p82G-the-pile_eval850.parquet
- split: small_llama_1p82G_the_pile_eval900
path:
- results_small-llama-1p82G-the-pile_eval900.parquet
- split: small_llama_1p82G_the_pile_eval950
path:
- results_small-llama-1p82G-the-pile_eval950.parquet
- split: small_llama_1p82G_the_pile_eval_1600
path:
- results_small-llama-1p82G-the-pile_eval_1600.parquet
- split: small_llama_1p82G_the_pile_eval_1650
path:
- results_small-llama-1p82G-the-pile_eval_1650.parquet
- split: small_llama_1p82G_the_pile_eval_1700
path:
- results_small-llama-1p82G-the-pile_eval_1700.parquet
- split: small_llama_1p82G_the_pile_eval_1750
path:
- results_small-llama-1p82G-the-pile_eval_1750.parquet
- split: small_llama_1p82G_the_pile_eval_1800
path:
- results_small-llama-1p82G-the-pile_eval_1800.parquet
- split: small_llama_1p82G_the_pile_eval_1850
path:
- results_small-llama-1p82G-the-pile_eval_1850.parquet
- split: small_llama_1p82G_the_pile_eval_1900
path:
- results_small-llama-1p82G-the-pile_eval_1900.parquet
- split: small_llama_1p82G_the_pile_eval_1950
path:
- results_small-llama-1p82G-the-pile_eval_1950.parquet
- split: small_llama_1p82G_the_pile_eval_2000
path:
- results_small-llama-1p82G-the-pile_eval_2000.parquet
- split: small_llama_1p82G_the_pile_eval_2050
path:
- results_small-llama-1p82G-the-pile_eval_2050.parquet
- split: small_llama_1p82G_the_pile_eval_2100
path:
- results_small-llama-1p82G-the-pile_eval_2100.parquet
- split: small_llama_1p82G_the_pile_eval_2150
path:
- results_small-llama-1p82G-the-pile_eval_2150.parquet
- split: small_llama_1p82G_the_pile_eval_2200
path:
- results_small-llama-1p82G-the-pile_eval_2200.parquet
- split: small_llama_1p82G_the_pile_eval_2250
path:
- results_small-llama-1p82G-the-pile_eval_2250.parquet
- split: small_llama_1p82G_the_pile_eval_2300
path:
- results_small-llama-1p82G-the-pile_eval_2300.parquet
- split: small_llama_1p82G_the_pile_eval_2350
path:
- results_small-llama-1p82G-the-pile_eval_2350.parquet
- split: small_llama_1p82G_the_pile_eval_2400
path:
- results_small-llama-1p82G-the-pile_eval_2400.parquet
- split: small_llama_1p82G_the_pile_eval_2450
path:
- results_small-llama-1p82G-the-pile_eval_2450.parquet
- split: small_llama_1p82G_the_pile_eval_2500
path:
- results_small-llama-1p82G-the-pile_eval_2500.parquet
- split: small_llama_1p82G_the_pile_eval_2750
path:
- results_small-llama-1p82G-the-pile_eval_2750.parquet
- split: small_llama_1p82G_the_pile_eval_2800
path:
- results_small-llama-1p82G-the-pile_eval_2800.parquet
- split: small_llama_1p82G_the_pile_eval_2850
path:
- results_small-llama-1p82G-the-pile_eval_2850.parquet
- split: small_llama_1p82G_the_pile_eval_2900
path:
- results_small-llama-1p82G-the-pile_eval_2900.parquet
- split: small_llama_1p82G_the_pile_eval_2950
path:
- results_small-llama-1p82G-the-pile_eval_2950.parquet
- split: small_llama_1p82G_the_pile_eval_3000
path:
- results_small-llama-1p82G-the-pile_eval_3000.parquet
- split: small_llama_1p82G_the_pile_eval_3050
path:
- results_small-llama-1p82G-the-pile_eval_3050.parquet
---
# Dataset Card for Evaluation run of 3050
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/3050
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [3050](https://huggingface.co/3050) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 406 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 116 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("HuggingFaceBR4/thomwolf-small-llama",
"harness_winogrande_0_small_llama_1p82G_the_pile_eval_3050_parquet",
split="train")
```
## Latest results
These are the [latest results from run ](https://huggingface.co/datasets/HuggingFaceBR4/thomwolf-small-llama/blob/main/0/results_small-llama-1p82G-the-pile_eval.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.3484644210447005,
"acc_stderr": 0.011027112856079687,
"acc_norm": 0.31556595066284593,
"acc_norm_stderr": 0.0116539643464742
},
"harness|arc:challenge|0": {
"acc": 0.2167235494880546,
"acc_stderr": 0.012040156713481189,
"acc_norm": 0.2645051194539249,
"acc_norm_stderr": 0.01288927294931337
},
"harness|arc:easy|0": {
"acc": 0.25126262626262624,
"acc_stderr": 0.008900141191221627,
"acc_norm": 0.24621212121212122,
"acc_norm_stderr": 0.008839902656771871
},
"harness|hellaswag|0": {
"acc": 0.25343557060346544,
"acc_stderr": 0.0043408916733205065,
"acc_norm": 0.25731925911173076,
"acc_norm_stderr": 0.00436263363737448
},
"harness|openbookqa|0": {
"acc": 0.188,
"acc_stderr": 0.017490678880346222,
"acc_norm": 0.3,
"acc_norm_stderr": 0.02051442622562805
},
"harness|piqa|0": {
"acc": 0.5380848748639826,
"acc_stderr": 0.011631933367846709,
"acc_norm": 0.5097932535364527,
"acc_norm_stderr": 0.011663586263283223
},
"harness|super_glue:boolq|0": {
"acc": 0.4795107033639144,
"acc_stderr": 0.008737709345935943
},
"harness|winogrande|0": {
"acc": 0.5122336227308603,
"acc_stderr": 0.014048278820405621
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
autoevaluate/autoeval-eval-acronym_identification-default-74aaf7-61623145439 | 2023-10-04T16:42:49.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-b0ca67-61616145438 | 2023-10-04T16:43:19.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-poem_sentiment-default-6ed962-61575145437 | 2023-10-04T16:43:41.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-5e41bb-60616145414 | 2023-10-04T16:44:24.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-c11a9e-60482145407 | 2023-10-04T16:45:03.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-c66c9a-61631145441 | 2023-10-04T16:45:42.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-b3b6fa-61692145443 | 2023-10-04T16:46:21.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ag_news-default-dee591-61725145444 | 2023-10-04T16:47:07.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ag_news-default-dee591-61725145445 | 2023-10-04T16:48:09.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ag_news-default-dee591-61725145446 | 2023-10-04T16:48:51.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-glue-cola-fc1172-61836145447 | 2023-10-04T16:49:32.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-code_search_net-java-509f82-60081145391 | 2023-10-04T16:50:15.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-aa0cfa-62027145451 | 2023-10-04T16:50:53.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-b5d06a-62145145453 | 2023-10-04T16:51:31.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-8eb5dc-62155145454 | 2023-10-04T16:51:59.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-a74c6a-62169145455 | 2023-10-04T16:52:08.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-glue-cola-100546-62010145450 | 2023-10-04T16:52:40.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-5b34f2-62170145456 | 2023-10-04T16:52:47.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-9bf877-62465145458 | 2023-10-04T16:53:00.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-9fd163-62516145459 | 2023-10-04T16:53:21.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-fec224-62535145460 | 2023-10-04T16:53:25.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-2fe1eb-62554145461 | 2023-10-04T16:53:37.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ade_corpus_v2-Ade_corpus_v2_classification-3d4cb7-62609145462 | 2023-10-04T16:54:03.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-cnn_dailymail-3.0.0-bcce97-62650145463 | 2023-10-04T17:03:52.000Z | [
"autotrain",
"evaluation",
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- cnn_dailymail
eval_info:
task: summarization
model: morenolq/bart-base-xsum
metrics: ['bertscore']
dataset_name: cnn_dailymail
dataset_config: 3.0.0
dataset_split: test
col_mapping:
text: article
target: highlights
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Summarization
* Model: morenolq/bart-base-xsum
* Dataset: cnn_dailymail
* Config: 3.0.0
* Split: test
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@Raffix](https://huggingface.co/Raffix) for evaluating this model. |
autoevaluate/autoeval-eval-ade_corpus_v2-Ade_corpus_v2_classification-acfa9c-62664145464 | 2023-10-04T16:54:25.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ade_corpus_v2-Ade_corpus_v2_classification-acfa9c-62664145465 | 2023-10-04T16:54:44.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-c14190-61467145433 | 2023-10-04T16:55:06.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-25226b-61630145440 | 2023-10-04T16:55:28.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-7f4909-62867145470 | 2023-10-04T16:55:44.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-squad-plain_text-c5543e-62761145468 | 2023-10-04T16:56:07.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
open-llm-leaderboard/details_aiplanet__panda-coder-13B | 2023-10-04T16:57:47.000Z | [
"region:us"
] | open-llm-leaderboard | null | null | null | 0 | 0 | ---
pretty_name: Evaluation run of aiplanet/panda-coder-13B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [aiplanet/panda-coder-13B](https://huggingface.co/aiplanet/panda-coder-13B) on\
\ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 61 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_aiplanet__panda-coder-13B\"\
,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\
\nThese are the [latest results from run 2023-10-04T16:56:18.723336](https://huggingface.co/datasets/open-llm-leaderboard/details_aiplanet__panda-coder-13B/blob/main/results_2023-10-04T16-56-18.723336.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2314240573187148,\n\
\ \"acc_stderr\": 0.03071122006512167,\n \"acc_norm\": 0.2314240573187148,\n\
\ \"acc_norm_stderr\": 0.03071122006512167,\n \"mc1\": 1.0,\n \
\ \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n\
\ },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.22696245733788395,\n\
\ \"acc_stderr\": 0.012240491536132861,\n \"acc_norm\": 0.22696245733788395,\n\
\ \"acc_norm_stderr\": 0.012240491536132861\n },\n \"harness|hellaswag|10\"\
: {\n \"acc\": 0.2504481179047998,\n \"acc_stderr\": 0.004323856300539177,\n\
\ \"acc_norm\": 0.2504481179047998,\n \"acc_norm_stderr\": 0.004323856300539177\n\
\ },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.22,\n\
\ \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \
\ \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-anatomy|5\"\
: {\n \"acc\": 0.18518518518518517,\n \"acc_stderr\": 0.03355677216313142,\n\
\ \"acc_norm\": 0.18518518518518517,\n \"acc_norm_stderr\": 0.03355677216313142\n\
\ },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n\
\ \"acc_stderr\": 0.031103182383123398,\n \"acc_norm\": 0.17763157894736842,\n\
\ \"acc_norm_stderr\": 0.031103182383123398\n },\n \"harness|hendrycksTest-business_ethics|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n\
\ \"acc_stderr\": 0.02528839450289137,\n \"acc_norm\": 0.21509433962264152,\n\
\ \"acc_norm_stderr\": 0.02528839450289137\n },\n \"harness|hendrycksTest-college_biology|5\"\
: {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n\
\ \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n\
\ },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\":\
\ 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \"acc_norm\": 0.2,\n\
\ \"acc_norm_stderr\": 0.04020151261036845\n },\n \"harness|hendrycksTest-college_computer_science|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n\
\ \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n\
\ \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \
\ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\"\
: {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.030952890217749874,\n\
\ \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.030952890217749874\n\
\ },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n\
\ \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n\
\ \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n \
\ },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\":\
\ 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n \"\
acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\
\ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\
\ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\
\ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"\
acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\
\ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\
\ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \
\ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"\
acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"\
acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\
: 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\
\ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\
acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n\
\ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n\
\ \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \
\ \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\
\ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\
acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\
acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"\
acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\
\ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\
\ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\
\ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\
\ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\
\ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\
acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\
\ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\
\ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\
\ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\
\ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\
\ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\
\ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n\
\ \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n\
\ \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\
\ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\
\ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\
\ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\
\ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\
\ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\
\ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\
\ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\
\ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\
\ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \
\ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\
\ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\
\ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\
\ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\
: 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\
: {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\
\ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\
\ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\
\ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\
\ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\
: {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\
\ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\
\ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\
\ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\
\ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\
: {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\
\ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\
\ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\
\ \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n\
\ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\
: {\n \"mc1\": 1.0,\n \"mc1_stderr\": 0.0,\n \"mc2\": NaN,\n\
\ \"mc2_stderr\": NaN\n }\n}\n```"
repo_url: https://huggingface.co/aiplanet/panda-coder-13B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|arc:challenge|25_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hellaswag|10_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-04T16-56-18.723336.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T16-56-18.723336.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-04T16-56-18.723336.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-04T16-56-18.723336.parquet'
- config_name: results
data_files:
- split: 2023_10_04T16_56_18.723336
path:
- results_2023-10-04T16-56-18.723336.parquet
- split: latest
path:
- results_2023-10-04T16-56-18.723336.parquet
---
# Dataset Card for Evaluation run of aiplanet/panda-coder-13B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/aiplanet/panda-coder-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [aiplanet/panda-coder-13B](https://huggingface.co/aiplanet/panda-coder-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_aiplanet__panda-coder-13B",
"harness_truthfulqa_mc_0",
split="train")
```
## Latest results
These are the [latest results from run 2023-10-04T16:56:18.723336](https://huggingface.co/datasets/open-llm-leaderboard/details_aiplanet__panda-coder-13B/blob/main/results_2023-10-04T16-56-18.723336.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.2314240573187148,
"acc_stderr": 0.03071122006512167,
"acc_norm": 0.2314240573187148,
"acc_norm_stderr": 0.03071122006512167,
"mc1": 1.0,
"mc1_stderr": 0.0,
"mc2": NaN,
"mc2_stderr": NaN
},
"harness|arc:challenge|25": {
"acc": 0.22696245733788395,
"acc_stderr": 0.012240491536132861,
"acc_norm": 0.22696245733788395,
"acc_norm_stderr": 0.012240491536132861
},
"harness|hellaswag|10": {
"acc": 0.2504481179047998,
"acc_stderr": 0.004323856300539177,
"acc_norm": 0.2504481179047998,
"acc_norm_stderr": 0.004323856300539177
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932268,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.18518518518518517,
"acc_stderr": 0.03355677216313142,
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313142
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.031103182383123398,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.031103182383123398
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.21509433962264152,
"acc_stderr": 0.02528839450289137,
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.02528839450289137
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.20809248554913296,
"acc_stderr": 0.030952890217749874,
"acc_norm": 0.20809248554913296,
"acc_norm_stderr": 0.030952890217749874
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237654,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237654
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.26382978723404255,
"acc_stderr": 0.028809989854102973,
"acc_norm": 0.26382978723404255,
"acc_norm_stderr": 0.028809989854102973
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.039994238792813365,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.039994238792813365
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.20899470899470898,
"acc_stderr": 0.02094048156533486,
"acc_norm": 0.20899470899470898,
"acc_norm_stderr": 0.02094048156533486
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04040610178208841,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04040610178208841
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.18,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.18,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.1774193548387097,
"acc_stderr": 0.02173254068932927,
"acc_norm": 0.1774193548387097,
"acc_norm_stderr": 0.02173254068932927
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.15270935960591134,
"acc_stderr": 0.02530890453938063,
"acc_norm": 0.15270935960591134,
"acc_norm_stderr": 0.02530890453938063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.17676767676767677,
"acc_stderr": 0.027178752639044915,
"acc_norm": 0.17676767676767677,
"acc_norm_stderr": 0.027178752639044915
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.19689119170984457,
"acc_stderr": 0.028697873971860664,
"acc_norm": 0.19689119170984457,
"acc_norm_stderr": 0.028697873971860664
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.20256410256410257,
"acc_stderr": 0.020377660970371372,
"acc_norm": 0.20256410256410257,
"acc_norm_stderr": 0.020377660970371372
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2111111111111111,
"acc_stderr": 0.024882116857655075,
"acc_norm": 0.2111111111111111,
"acc_norm_stderr": 0.024882116857655075
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.21008403361344538,
"acc_stderr": 0.026461398717471874,
"acc_norm": 0.21008403361344538,
"acc_norm_stderr": 0.026461398717471874
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.1986754966887417,
"acc_stderr": 0.03257847384436776,
"acc_norm": 0.1986754966887417,
"acc_norm_stderr": 0.03257847384436776
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.1926605504587156,
"acc_stderr": 0.016909276884936094,
"acc_norm": 0.1926605504587156,
"acc_norm_stderr": 0.016909276884936094
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.1527777777777778,
"acc_stderr": 0.024536326026134224,
"acc_norm": 0.1527777777777778,
"acc_norm_stderr": 0.024536326026134224
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.270042194092827,
"acc_stderr": 0.028900721906293426,
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.31390134529147984,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.31390134529147984,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2396694214876033,
"acc_stderr": 0.03896878985070417,
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070417
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946336
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.22085889570552147,
"acc_stderr": 0.032591773927421776,
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.032591773927421776
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.3125,
"acc_stderr": 0.043994650575715215,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"harness|hendrycksTest-management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266224,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266224
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.2905982905982906,
"acc_stderr": 0.02974504857267404,
"acc_norm": 0.2905982905982906,
"acc_norm_stderr": 0.02974504857267404
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.23754789272030652,
"acc_stderr": 0.015218733046150193,
"acc_norm": 0.23754789272030652,
"acc_norm_stderr": 0.015218733046150193
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.023267528432100174,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.023267528432100174
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.023929155517351284,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.023929155517351284
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.1864951768488746,
"acc_stderr": 0.02212243977248077,
"acc_norm": 0.1864951768488746,
"acc_norm_stderr": 0.02212243977248077
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.21604938271604937,
"acc_stderr": 0.022899162918445806,
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.022899162918445806
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432417,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432417
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2457627118644068,
"acc_stderr": 0.010996156635142692,
"acc_norm": 0.2457627118644068,
"acc_norm_stderr": 0.010996156635142692
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.18382352941176472,
"acc_stderr": 0.023529242185193106,
"acc_norm": 0.18382352941176472,
"acc_norm_stderr": 0.023529242185193106
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.25,
"acc_stderr": 0.01751781884501444,
"acc_norm": 0.25,
"acc_norm_stderr": 0.01751781884501444
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03955932861795833,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03955932861795833
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.18775510204081633,
"acc_stderr": 0.02500025603954621,
"acc_norm": 0.18775510204081633,
"acc_norm_stderr": 0.02500025603954621
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.24378109452736318,
"acc_stderr": 0.03036049015401465,
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.03036049015401465
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-virology|5": {
"acc": 0.28313253012048195,
"acc_stderr": 0.03507295431370518,
"acc_norm": 0.28313253012048195,
"acc_norm_stderr": 0.03507295431370518
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.3216374269005848,
"acc_stderr": 0.03582529442573122,
"acc_norm": 0.3216374269005848,
"acc_norm_stderr": 0.03582529442573122
},
"harness|truthfulqa:mc|0": {
"mc1": 1.0,
"mc1_stderr": 0.0,
"mc2": NaN,
"mc2_stderr": NaN
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
autoevaluate/autoeval-eval-acronym_identification-default-4f802c-61667145442 | 2023-10-04T16:56:55.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-e27da7-61894145448 | 2023-10-04T16:57:30.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-f26d86-63035145473 | 2023-10-04T16:57:33.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
BangumiBase/nisekoi | 2023-10-04T19:26:34.000Z | [
"size_categories:1K<n<10K",
"license:mit",
"art",
"region:us"
] | BangumiBase | null | null | null | 0 | 0 | ---
license: mit
tags:
- art
size_categories:
- 1K<n<10K
---
# Bangumi Image Base of Nisekoi
This is the image base of bangumi NISEKOI, we detected 38 characters, 4374 images in total. The full dataset is [here](all.zip).
**Please note that these image bases are not guaranteed to be 100% cleaned; they may actually contain noisy samples.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).
Here is the characters' preview:
| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|
| 0 | 1202 | [Download](0/dataset.zip) |  |  |  |  |  |  |  |  |
| 1 | 107 | [Download](1/dataset.zip) |  |  |  |  |  |  |  |  |
| 2 | 222 | [Download](2/dataset.zip) |  |  |  |  |  |  |  |  |
| 3 | 39 | [Download](3/dataset.zip) |  |  |  |  |  |  |  |  |
| 4 | 140 | [Download](4/dataset.zip) |  |  |  |  |  |  |  |  |
| 5 | 37 | [Download](5/dataset.zip) |  |  |  |  |  |  |  |  |
| 6 | 18 | [Download](6/dataset.zip) |  |  |  |  |  |  |  |  |
| 7 | 55 | [Download](7/dataset.zip) |  |  |  |  |  |  |  |  |
| 8 | 67 | [Download](8/dataset.zip) |  |  |  |  |  |  |  |  |
| 9 | 40 | [Download](9/dataset.zip) |  |  |  |  |  |  |  |  |
| 10 | 448 | [Download](10/dataset.zip) |  |  |  |  |  |  |  |  |
| 11 | 36 | [Download](11/dataset.zip) |  |  |  |  |  |  |  |  |
| 12 | 47 | [Download](12/dataset.zip) |  |  |  |  |  |  |  |  |
| 13 | 45 | [Download](13/dataset.zip) |  |  |  |  |  |  |  |  |
| 14 | 90 | [Download](14/dataset.zip) |  |  |  |  |  |  |  |  |
| 15 | 60 | [Download](15/dataset.zip) |  |  |  |  |  |  |  |  |
| 16 | 23 | [Download](16/dataset.zip) |  |  |  |  |  |  |  |  |
| 17 | 15 | [Download](17/dataset.zip) |  |  |  |  |  |  |  |  |
| 18 | 15 | [Download](18/dataset.zip) |  |  |  |  |  |  |  |  |
| 19 | 158 | [Download](19/dataset.zip) |  |  |  |  |  |  |  |  |
| 20 | 33 | [Download](20/dataset.zip) |  |  |  |  |  |  |  |  |
| 21 | 35 | [Download](21/dataset.zip) |  |  |  |  |  |  |  |  |
| 22 | 15 | [Download](22/dataset.zip) |  |  |  |  |  |  |  |  |
| 23 | 21 | [Download](23/dataset.zip) |  |  |  |  |  |  |  |  |
| 24 | 17 | [Download](24/dataset.zip) |  |  |  |  |  |  |  |  |
| 25 | 10 | [Download](25/dataset.zip) |  |  |  |  |  |  |  |  |
| 26 | 513 | [Download](26/dataset.zip) |  |  |  |  |  |  |  |  |
| 27 | 184 | [Download](27/dataset.zip) |  |  |  |  |  |  |  |  |
| 28 | 41 | [Download](28/dataset.zip) |  |  |  |  |  |  |  |  |
| 29 | 45 | [Download](29/dataset.zip) |  |  |  |  |  |  |  |  |
| 30 | 15 | [Download](30/dataset.zip) |  |  |  |  |  |  |  |  |
| 31 | 18 | [Download](31/dataset.zip) |  |  |  |  |  |  |  |  |
| 32 | 8 | [Download](32/dataset.zip) |  |  |  |  |  |  |  |  |
| 33 | 234 | [Download](33/dataset.zip) |  |  |  |  |  |  |  |  |
| 34 | 16 | [Download](34/dataset.zip) |  |  |  |  |  |  |  |  |
| 35 | 7 | [Download](35/dataset.zip) |  |  |  |  |  |  |  | N/A |
| 36 | 56 | [Download](36/dataset.zip) |  |  |  |  |  |  |  |  |
| noise | 242 | [Download](-1/dataset.zip) |  |  |  |  |  |  |  |  |
|
autoevaluate/autoeval-eval-squad-plain_text-ff8cab-63048145474 | 2023-10-04T16:58:09.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-574e8e-63130145475 | 2023-10-04T16:58:16.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
BangumiBase/toloveru | 2023-10-05T09:06:22.000Z | [
"size_categories:1K<n<10K",
"license:mit",
"art",
"region:us"
] | BangumiBase | null | null | null | 0 | 0 | ---
license: mit
tags:
- art
size_categories:
- 1K<n<10K
---
# Bangumi Image Base of To Love-ru
This is the image base of bangumi To LOVE-Ru, we detected 69 characters, 9598 images in total. The full dataset is [here](all.zip).
**Please note that these image bases are not guaranteed to be 100% cleaned; they may actually contain noisy samples.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability).
Here is the characters' preview:
| # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 |
|:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|
| 0 | 152 | [Download](0/dataset.zip) |  |  |  |  |  |  |  |  |
| 1 | 29 | [Download](1/dataset.zip) |  |  |  |  |  |  |  |  |
| 2 | 104 | [Download](2/dataset.zip) |  |  |  |  |  |  |  |  |
| 3 | 337 | [Download](3/dataset.zip) |  |  |  |  |  |  |  |  |
| 4 | 439 | [Download](4/dataset.zip) |  |  |  |  |  |  |  |  |
| 5 | 129 | [Download](5/dataset.zip) |  |  |  |  |  |  |  |  |
| 6 | 50 | [Download](6/dataset.zip) |  |  |  |  |  |  |  |  |
| 7 | 29 | [Download](7/dataset.zip) |  |  |  |  |  |  |  |  |
| 8 | 14 | [Download](8/dataset.zip) |  |  |  |  |  |  |  |  |
| 9 | 52 | [Download](9/dataset.zip) |  |  |  |  |  |  |  |  |
| 10 | 80 | [Download](10/dataset.zip) |  |  |  |  |  |  |  |  |
| 11 | 47 | [Download](11/dataset.zip) |  |  |  |  |  |  |  |  |
| 12 | 40 | [Download](12/dataset.zip) |  |  |  |  |  |  |  |  |
| 13 | 606 | [Download](13/dataset.zip) |  |  |  |  |  |  |  |  |
| 14 | 71 | [Download](14/dataset.zip) |  |  |  |  |  |  |  |  |
| 15 | 14 | [Download](15/dataset.zip) |  |  |  |  |  |  |  |  |
| 16 | 24 | [Download](16/dataset.zip) |  |  |  |  |  |  |  |  |
| 17 | 26 | [Download](17/dataset.zip) |  |  |  |  |  |  |  |  |
| 18 | 52 | [Download](18/dataset.zip) |  |  |  |  |  |  |  |  |
| 19 | 42 | [Download](19/dataset.zip) |  |  |  |  |  |  |  |  |
| 20 | 142 | [Download](20/dataset.zip) |  |  |  |  |  |  |  |  |
| 21 | 38 | [Download](21/dataset.zip) |  |  |  |  |  |  |  |  |
| 22 | 62 | [Download](22/dataset.zip) |  |  |  |  |  |  |  |  |
| 23 | 96 | [Download](23/dataset.zip) |  |  |  |  |  |  |  |  |
| 24 | 52 | [Download](24/dataset.zip) |  |  |  |  |  |  |  |  |
| 25 | 171 | [Download](25/dataset.zip) |  |  |  |  |  |  |  |  |
| 26 | 49 | [Download](26/dataset.zip) |  |  |  |  |  |  |  |  |
| 27 | 104 | [Download](27/dataset.zip) |  |  |  |  |  |  |  |  |
| 28 | 16 | [Download](28/dataset.zip) |  |  |  |  |  |  |  |  |
| 29 | 14 | [Download](29/dataset.zip) |  |  |  |  |  |  |  |  |
| 30 | 1783 | [Download](30/dataset.zip) |  |  |  |  |  |  |  |  |
| 31 | 67 | [Download](31/dataset.zip) |  |  |  |  |  |  |  |  |
| 32 | 23 | [Download](32/dataset.zip) |  |  |  |  |  |  |  |  |
| 33 | 33 | [Download](33/dataset.zip) |  |  |  |  |  |  |  |  |
| 34 | 15 | [Download](34/dataset.zip) |  |  |  |  |  |  |  |  |
| 35 | 44 | [Download](35/dataset.zip) |  |  |  |  |  |  |  |  |
| 36 | 27 | [Download](36/dataset.zip) |  |  |  |  |  |  |  |  |
| 37 | 14 | [Download](37/dataset.zip) |  |  |  |  |  |  |  |  |
| 38 | 34 | [Download](38/dataset.zip) |  |  |  |  |  |  |  |  |
| 39 | 34 | [Download](39/dataset.zip) |  |  |  |  |  |  |  |  |
| 40 | 68 | [Download](40/dataset.zip) |  |  |  |  |  |  |  |  |
| 41 | 12 | [Download](41/dataset.zip) |  |  |  |  |  |  |  |  |
| 42 | 44 | [Download](42/dataset.zip) |  |  |  |  |  |  |  |  |
| 43 | 14 | [Download](43/dataset.zip) |  |  |  |  |  |  |  |  |
| 44 | 326 | [Download](44/dataset.zip) |  |  |  |  |  |  |  |  |
| 45 | 699 | [Download](45/dataset.zip) |  |  |  |  |  |  |  |  |
| 46 | 368 | [Download](46/dataset.zip) |  |  |  |  |  |  |  |  |
| 47 | 44 | [Download](47/dataset.zip) |  |  |  |  |  |  |  |  |
| 48 | 873 | [Download](48/dataset.zip) |  |  |  |  |  |  |  |  |
| 49 | 38 | [Download](49/dataset.zip) |  |  |  |  |  |  |  |  |
| 50 | 39 | [Download](50/dataset.zip) |  |  |  |  |  |  |  |  |
| 51 | 92 | [Download](51/dataset.zip) |  |  |  |  |  |  |  |  |
| 52 | 10 | [Download](52/dataset.zip) |  |  |  |  |  |  |  |  |
| 53 | 17 | [Download](53/dataset.zip) |  |  |  |  |  |  |  |  |
| 54 | 15 | [Download](54/dataset.zip) |  |  |  |  |  |  |  |  |
| 55 | 110 | [Download](55/dataset.zip) |  |  |  |  |  |  |  |  |
| 56 | 23 | [Download](56/dataset.zip) |  |  |  |  |  |  |  |  |
| 57 | 584 | [Download](57/dataset.zip) |  |  |  |  |  |  |  |  |
| 58 | 107 | [Download](58/dataset.zip) |  |  |  |  |  |  |  |  |
| 59 | 15 | [Download](59/dataset.zip) |  |  |  |  |  |  |  |  |
| 60 | 30 | [Download](60/dataset.zip) |  |  |  |  |  |  |  |  |
| 61 | 18 | [Download](61/dataset.zip) |  |  |  |  |  |  |  |  |
| 62 | 103 | [Download](62/dataset.zip) |  |  |  |  |  |  |  |  |
| 63 | 37 | [Download](63/dataset.zip) |  |  |  |  |  |  |  |  |
| 64 | 10 | [Download](64/dataset.zip) |  |  |  |  |  |  |  |  |
| 65 | 10 | [Download](65/dataset.zip) |  |  |  |  |  |  |  |  |
| 66 | 77 | [Download](66/dataset.zip) |  |  |  |  |  |  |  |  |
| 67 | 12 | [Download](67/dataset.zip) |  |  |  |  |  |  |  |  |
| noise | 648 | [Download](-1/dataset.zip) |  |  |  |  |  |  |  |  |
|
autoevaluate/autoeval-eval-acronym_identification-default-7b8782-63292145476 | 2023-10-04T16:58:54.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-d2ec3e-63312145477 | 2023-10-04T16:58:57.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-2e0908-63347145478 | 2023-10-04T16:59:32.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-811c9b-63375145479 | 2023-10-04T16:59:35.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-9a048a-63446145480 | 2023-10-04T17:00:10.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-9a048a-63446145481 | 2023-10-04T17:00:14.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-9a048a-63446145482 | 2023-10-04T17:00:33.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-9a048a-63446145483 | 2023-10-04T17:00:41.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-squad_v2-squad_v2-49a99d-61911145449 | 2023-10-04T17:00:53.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-9a048a-63446145484 | 2023-10-04T17:01:09.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-34997c-63467145485 | 2023-10-04T17:01:11.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-yelp_review_full-yelp_review_full-e47303-62996145472 | 2023-10-04T17:01:52.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-34997c-63467145488 | 2023-10-04T17:01:58.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-squad_it-default-123c4d-62722145466 | 2023-10-04T17:02:24.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-34997c-63467145487 | 2023-10-04T17:02:42.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-34997c-63467145486 | 2023-10-04T17:02:58.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-34997c-63467145489 | 2023-10-04T17:03:17.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-multi_nli-default-516b93-63471145491 | 2023-10-04T17:03:20.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-squad_it-default-7c34e7-62734145467 | 2023-10-04T17:03:25.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-multi_nli-default-516b93-63471145490 | 2023-10-04T17:03:58.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-24024a-63483145492 | 2023-10-04T17:04:13.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-24024a-63483145493 | 2023-10-04T17:04:14.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ade_corpus_v2-Ade_corpus_v2_classification-4a046d-62884145471 | 2023-10-04T17:04:35.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-9bd100-63604145500 | 2023-10-04T17:04:50.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-b0b4bf-63576145498 | 2023-10-04T17:04:51.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-24024a-63483145494 | 2023-10-04T17:04:54.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-170dcc-63631145501 | 2023-10-04T17:05:02.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-c84dea-63639145502 | 2023-10-04T17:05:19.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-amazon_reviews_multi-en-24024a-63483145495 | 2023-10-04T17:05:29.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-ffb511-63661145504 | 2023-10-04T17:05:31.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-e0a483-63649145503 | 2023-10-04T17:05:32.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-imdb-plain_text-cbeb22-63578145499 | 2023-10-04T17:05:42.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ag_news-default-60f163-63663145505 | 2023-10-04T17:05:56.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-d98459-63705145507 | 2023-10-04T17:06:10.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-acronym_identification-default-2dbe75-62775145469 | 2023-10-04T17:06:16.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
autoevaluate/autoeval-eval-ag_news-default-70b4e4-63721145508 | 2023-10-04T17:06:18.000Z | [
"region:us"
] | autoevaluate | null | null | null | 0 | 0 | Entry not found |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.