id stringlengths 2 115 | author stringlengths 2 42 ⌀ | last_modified timestamp[us, tz=UTC] | downloads int64 0 8.87M | likes int64 0 3.84k | paperswithcode_id stringlengths 2 45 ⌀ | tags list | lastModified timestamp[us, tz=UTC] | createdAt stringlengths 24 24 | key stringclasses 1 value | created timestamp[us] | card stringlengths 1 1.01M | embedding list | library_name stringclasses 21 values | pipeline_tag stringclasses 27 values | mask_token null | card_data null | widget_data null | model_index null | config null | transformers_info null | spaces null | safetensors null | transformersInfo null | modelId stringlengths 5 111 ⌀ | embeddings list |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
kmok1/test | kmok1 | 2023-11-18T15:46:14Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T15:46:14Z | 2023-11-18T13:24:06.000Z | 2023-11-18T13:24:06 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Moemu/Muice-Dataset | Moemu | 2023-11-18T15:29:27Z | 0 | 1 | null | [
"task_categories:question-answering",
"task_categories:text-generation",
"task_categories:conversational",
"size_categories:n<1k",
"language:zh",
"license:wtfpl",
"ChatGLM",
"region:us"
] | 2023-11-18T15:29:27Z | 2023-11-18T13:45:13.000Z | 2023-11-18T13:45:13 | ---
license: wtfpl
task_categories:
- question-answering
- text-generation
- conversational
language:
- zh
tags:
- ChatGLM
size_categories:
- n<1k
---
## 导言
这是目前公开的沐雪训练集,一共200条,包含了自我认知,情感对话,对话风格等类。随着沐雪的发展,以后还会有更多的训练集公开
## 许可
本训练集目前使用使用WTFPL(Do What the Fuck You Want to Public License) ,也就是说,**您可以以任何方式使用此训练集,包括商业用途**,希望各位早日造出自己的沐雪!
## 来源
Twitter, Bilibili Live, Douban, [Self_cognition.json](https://github.com/hiyouga/ChatGLM-Efficient-Tuning/blob/main/data/self_cognition.json)
| [
-0.5301586389541626,
-0.6686496138572693,
0.22197166085243225,
1.0505168437957764,
-0.43181437253952026,
-0.11234752833843231,
-0.19424402713775635,
-0.2765522301197052,
-0.07378926873207092,
0.5311924815177917,
-0.5036936402320862,
-0.6419891119003296,
-0.38410109281539917,
-0.14436630904... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
sahil2801/ff | sahil2801 | 2023-11-23T16:28:49Z | 0 | 1 | null | [
"region:us"
] | 2023-11-23T16:28:49Z | 2023-11-18T14:08:32.000Z | 2023-11-18T14:08:32 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Kaue123456/PaulinhoGogo | Kaue123456 | 2023-11-18T14:11:54Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T14:11:54Z | 2023-11-18T14:09:34.000Z | 2023-11-18T14:09:34 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
SkilDyQQ/test2 | SkilDyQQ | 2023-11-18T14:50:31Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:50:31Z | 2023-11-18T14:18:28.000Z | 2023-11-18T14:18:28 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
sczssczxz/venti | sczssczxz | 2023-11-18T14:27:50Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T14:27:50Z | 2023-11-18T14:27:14.000Z | 2023-11-18T14:27:14 | ---
license: openrail
---
| [
-0.12853367626667023,
-0.18616794049739838,
0.6529126763343811,
0.4943627417087555,
-0.19319313764572144,
0.23607443273067474,
0.36071979999542236,
0.05056338757276535,
0.5793654322624207,
0.7400138974189758,
-0.6508103013038635,
-0.23783987760543823,
-0.710224986076355,
-0.047825977206230... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Mzh666/DONGZU | Mzh666 | 2023-11-20T07:43:51Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-20T07:43:51Z | 2023-11-18T14:30:26.000Z | 2023-11-18T14:30:26 | ---
license: mit
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 88067000.0
num_examples: 124
download_size: 55259718
dataset_size: 88067000.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.12853367626667023,
-0.18616794049739838,
0.6529126763343811,
0.4943627417087555,
-0.19319313764572144,
0.23607443273067474,
0.36071979999542236,
0.05056338757276535,
0.5793654322624207,
0.7400138974189758,
-0.6508103013038635,
-0.23783987760543823,
-0.710224986076355,
-0.047825977206230... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
BigSuperbPrivate/SpokenQA_SLUE | BigSuperbPrivate | 2023-11-18T14:44:45Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:44:45Z | 2023-11-18T14:31:23.000Z | 2023-11-18T14:31:23 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
brianrigoni/pntdf-flores | brianrigoni | 2023-11-18T14:47:42Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:47:42Z | 2023-11-18T14:31:58.000Z | 2023-11-18T14:31:58 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DynamicSuperb/SpokenQA_SLUE | DynamicSuperb | 2023-11-18T14:46:34Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:46:34Z | 2023-11-18T14:35:11.000Z | 2023-11-18T14:35:11 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
tim9292654/tags | tim9292654 | 2023-11-18T14:38:21Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:38:21Z | 2023-11-18T14:37:48.000Z | 2023-11-18T14:37:48 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
textminr/ner-test | textminr | 2023-11-18T17:19:00Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T17:19:00Z | 2023-11-18T14:54:43.000Z | 2023-11-18T14:54:43 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
kdawoud91/News_Articles | kdawoud91 | 2023-11-18T14:59:46Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T14:59:46Z | 2023-11-18T14:58:54.000Z | 2023-11-18T14:58:54 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
rodrigomokipo/bowie2 | rodrigomokipo | 2023-11-18T15:10:08Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T15:10:08Z | 2023-11-18T15:09:01.000Z | 2023-11-18T15:09:01 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Leofierus/Drone-Dataset | Leofierus | 2023-11-18T15:16:29Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-18T15:16:29Z | 2023-11-18T15:09:01.000Z | 2023-11-18T15:09:01 | ---
license: mit
---
The given dataset is a clone of the [drone dataset](https://www.kaggle.com/datasets/dasmehdixtr/drone-dataset-uav) on Kaggle.
It is created by [Mehdi Özel](https://www.researchgate.net/profile/Mehdi-Oezel). | [
-0.6426308155059814,
-0.4187977910041809,
-0.09806672483682632,
-0.43388330936431885,
0.04622220993041992,
0.3321099579334259,
0.6307333111763,
-0.01161559484899044,
0.7574605941772461,
0.5849515199661255,
-0.984983503818512,
-0.1834777295589447,
-0.30301907658576965,
-0.30234354734420776,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
KADUZADA/MUMUZINHO | KADUZADA | 2023-11-19T19:17:06Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T19:17:06Z | 2023-11-18T15:10:28.000Z | 2023-11-18T15:10:28 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
HackUN/3d_Gaussian_splatting | HackUN | 2023-11-20T22:37:30Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-20T22:37:30Z | 2023-11-18T15:16:26.000Z | 2023-11-18T15:16:26 | ---
license: mit
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_Intel__neural-chat-7b-v3-1_public | open-llm-leaderboard | 2023-11-18T15:46:34Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T15:46:34Z | 2023-11-18T15:22:20.000Z | 2023-11-18T15:22:20 | ---
pretty_name: Evaluation run of Intel/neural-chat-7b-v3-1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Intel/neural-chat-7b-v3-1](https://huggingface.co/Intel/neural-chat-7b-v3-1)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Intel__neural-chat-7b-v3-1_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T15:42:45.444313](https://huggingface.co/datasets/open-llm-leaderboard/details_Intel__neural-chat-7b-v3-1_public/blob/main/results_2023-11-18T15-42-45.444313.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6203975476749912,\n\
\ \"acc_stderr\": 0.03253317374017875,\n \"acc_norm\": 0.6286844485803,\n\
\ \"acc_norm_stderr\": 0.03323093034337969,\n \"mc1\": 0.44063647490820074,\n\
\ \"mc1_stderr\": 0.01737969755543745,\n \"mc2\": 0.5953808732777186,\n\
\ \"mc2_stderr\": 0.015347393503467649,\n \"em\": 0.3183724832214765,\n\
\ \"em_stderr\": 0.004770687516057205,\n \"f1\": 0.44000419463087526,\n\
\ \"f1_stderr\": 0.00452137107601273\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6322525597269625,\n \"acc_stderr\": 0.01409099561816848,\n\
\ \"acc_norm\": 0.6629692832764505,\n \"acc_norm_stderr\": 0.013813476652902276\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6446922923720374,\n\
\ \"acc_stderr\": 0.0047762832034680975,\n \"acc_norm\": 0.8359888468432584,\n\
\ \"acc_norm_stderr\": 0.003695289340514483\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n\
\ \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n\
\ \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n\
\ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\
\ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n\
\ \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\
\ \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n\
\ \"acc_norm_stderr\": 0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n\
\ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\
\ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\
\ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266345,\n\
\ \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266345\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n\
\ \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.032671518489247764,\n\
\ \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.032671518489247764\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\
\ \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n\
\ \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n\
\ \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3862433862433862,\n \"acc_stderr\": 0.025075981767601684,\n \"\
acc_norm\": 0.3862433862433862,\n \"acc_norm_stderr\": 0.025075981767601684\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n\
\ \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n\
\ \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n\
\ \"acc_stderr\": 0.023904914311782658,\n \"acc_norm\": 0.7709677419354839,\n\
\ \"acc_norm_stderr\": 0.023904914311782658\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175008,\n\
\ \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\"\
: 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009182,\n\
\ \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009182\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7575757575757576,\n \"acc_stderr\": 0.03053289223393202,\n \"\
acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03053289223393202\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033446,\n\
\ \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033446\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6051282051282051,\n \"acc_stderr\": 0.024784316942156395,\n\
\ \"acc_norm\": 0.6051282051282051,\n \"acc_norm_stderr\": 0.024784316942156395\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \
\ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n\
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"\
acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8366972477064221,\n \"acc_stderr\": 0.015848255806501562,\n \"\
acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.015848255806501562\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977749,\n \"\
acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977749\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.803921568627451,\n \"acc_stderr\": 0.027865942286639325,\n \"\
acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639325\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7974683544303798,\n \"acc_stderr\": 0.026160568246601446,\n \
\ \"acc_norm\": 0.7974683544303798,\n \"acc_norm_stderr\": 0.026160568246601446\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n\
\ \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"\
acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n\
\ \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n\
\ \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664742,\n\
\ \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664742\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n\
\ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n\
\ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.037601780060266196,\n\
\ \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.037601780060266196\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n\
\ \"acc_stderr\": 0.022509033937077805,\n \"acc_norm\": 0.8632478632478633,\n\
\ \"acc_norm_stderr\": 0.022509033937077805\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \
\ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n\
\ \"acc_stderr\": 0.01396439376989914,\n \"acc_norm\": 0.8122605363984674,\n\
\ \"acc_norm_stderr\": 0.01396439376989914\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6791907514450867,\n \"acc_stderr\": 0.025131000233647893,\n\
\ \"acc_norm\": 0.6791907514450867,\n \"acc_norm_stderr\": 0.025131000233647893\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.38100558659217876,\n\
\ \"acc_stderr\": 0.016242028834053627,\n \"acc_norm\": 0.38100558659217876,\n\
\ \"acc_norm_stderr\": 0.016242028834053627\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.026336613469046626,\n\
\ \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.026336613469046626\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6752411575562701,\n\
\ \"acc_stderr\": 0.026596782287697043,\n \"acc_norm\": 0.6752411575562701,\n\
\ \"acc_norm_stderr\": 0.026596782287697043\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7191358024691358,\n \"acc_stderr\": 0.02500646975579921,\n\
\ \"acc_norm\": 0.7191358024691358,\n \"acc_norm_stderr\": 0.02500646975579921\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766006,\n \
\ \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766006\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4439374185136897,\n\
\ \"acc_stderr\": 0.012689708167787684,\n \"acc_norm\": 0.4439374185136897,\n\
\ \"acc_norm_stderr\": 0.012689708167787684\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.02888819310398863,\n\
\ \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.02888819310398863\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6421568627450981,\n \"acc_stderr\": 0.019393058402355435,\n \
\ \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.019393058402355435\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.710204081632653,\n \"acc_stderr\": 0.029043088683304328,\n\
\ \"acc_norm\": 0.710204081632653,\n \"acc_norm_stderr\": 0.029043088683304328\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n\
\ \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n\
\ \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \
\ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\
\ \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n\
\ \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n\
\ \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.44063647490820074,\n\
\ \"mc1_stderr\": 0.01737969755543745,\n \"mc2\": 0.5953808732777186,\n\
\ \"mc2_stderr\": 0.015347393503467649\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7797947908445146,\n \"acc_stderr\": 0.011646276755089691\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.3183724832214765,\n \
\ \"em_stderr\": 0.004770687516057205,\n \"f1\": 0.44000419463087526,\n\
\ \"f1_stderr\": 0.00452137107601273\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.19408642911296436,\n \"acc_stderr\": 0.01089391830819241\n\
\ }\n}\n```"
repo_url: https://huggingface.co/Intel/neural-chat-7b-v3-1
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|drop|3_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|drop|3_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|drop|3_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-19-14.739909.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-23-13.598780.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-42-45.444313.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-42-45.444313.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- '**/details_harness|winogrande|5_2023-11-18T15-19-14.739909.parquet'
- split: 2023_11_18T15_23_13.598780
path:
- '**/details_harness|winogrande|5_2023-11-18T15-23-13.598780.parquet'
- split: 2023_11_18T15_42_45.444313
path:
- '**/details_harness|winogrande|5_2023-11-18T15-42-45.444313.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T15-42-45.444313.parquet'
- config_name: results
data_files:
- split: 2023_11_18T15_19_14.739909
path:
- results_2023-11-18T15-19-14.739909.parquet
- split: 2023_11_18T15_23_13.598780
path:
- results_2023-11-18T15-23-13.598780.parquet
- split: 2023_11_18T15_42_45.444313
path:
- results_2023-11-18T15-42-45.444313.parquet
- split: latest
path:
- results_2023-11-18T15-42-45.444313.parquet
---
# Dataset Card for Evaluation run of Intel/neural-chat-7b-v3-1
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/Intel/neural-chat-7b-v3-1
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [Intel/neural-chat-7b-v3-1](https://huggingface.co/Intel/neural-chat-7b-v3-1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Intel__neural-chat-7b-v3-1_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T15:42:45.444313](https://huggingface.co/datasets/open-llm-leaderboard/details_Intel__neural-chat-7b-v3-1_public/blob/main/results_2023-11-18T15-42-45.444313.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6203975476749912,
"acc_stderr": 0.03253317374017875,
"acc_norm": 0.6286844485803,
"acc_norm_stderr": 0.03323093034337969,
"mc1": 0.44063647490820074,
"mc1_stderr": 0.01737969755543745,
"mc2": 0.5953808732777186,
"mc2_stderr": 0.015347393503467649,
"em": 0.3183724832214765,
"em_stderr": 0.004770687516057205,
"f1": 0.44000419463087526,
"f1_stderr": 0.00452137107601273
},
"harness|arc:challenge|25": {
"acc": 0.6322525597269625,
"acc_stderr": 0.01409099561816848,
"acc_norm": 0.6629692832764505,
"acc_norm_stderr": 0.013813476652902276
},
"harness|hellaswag|10": {
"acc": 0.6446922923720374,
"acc_stderr": 0.0047762832034680975,
"acc_norm": 0.8359888468432584,
"acc_norm_stderr": 0.003695289340514483
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6148148148148148,
"acc_stderr": 0.04203921040156279,
"acc_norm": 0.6148148148148148,
"acc_norm_stderr": 0.04203921040156279
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6716981132075471,
"acc_stderr": 0.02890159361241178,
"acc_norm": 0.6716981132075471,
"acc_norm_stderr": 0.02890159361241178
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.037455547914624555,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.037455547914624555
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.04858083574266345,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.04858083574266345
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.74,
"acc_norm_stderr": 0.044084400227680794
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5148936170212766,
"acc_stderr": 0.032671518489247764,
"acc_norm": 0.5148936170212766,
"acc_norm_stderr": 0.032671518489247764
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.046920083813689104,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.046920083813689104
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5241379310344828,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.0416180850350153
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3862433862433862,
"acc_stderr": 0.025075981767601684,
"acc_norm": 0.3862433862433862,
"acc_norm_stderr": 0.025075981767601684
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.0442626668137991,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.0442626668137991
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7709677419354839,
"acc_stderr": 0.023904914311782658,
"acc_norm": 0.7709677419354839,
"acc_norm_stderr": 0.023904914311782658
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5221674876847291,
"acc_stderr": 0.03514528562175008,
"acc_norm": 0.5221674876847291,
"acc_norm_stderr": 0.03514528562175008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7636363636363637,
"acc_stderr": 0.03317505930009182,
"acc_norm": 0.7636363636363637,
"acc_norm_stderr": 0.03317505930009182
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03053289223393202,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03053289223393202
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.021500249576033446,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.021500249576033446
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6051282051282051,
"acc_stderr": 0.024784316942156395,
"acc_norm": 0.6051282051282051,
"acc_norm_stderr": 0.024784316942156395
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.028742040903948485,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.028742040903948485
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.030388353551886793,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.030388353551886793
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.36423841059602646,
"acc_stderr": 0.03929111781242741,
"acc_norm": 0.36423841059602646,
"acc_norm_stderr": 0.03929111781242741
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.015848255806501562,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.015848255806501562
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4861111111111111,
"acc_stderr": 0.03408655867977749,
"acc_norm": 0.4861111111111111,
"acc_norm_stderr": 0.03408655867977749
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.803921568627451,
"acc_stderr": 0.027865942286639325,
"acc_norm": 0.803921568627451,
"acc_norm_stderr": 0.027865942286639325
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7974683544303798,
"acc_stderr": 0.026160568246601446,
"acc_norm": 0.7974683544303798,
"acc_norm_stderr": 0.026160568246601446
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.732824427480916,
"acc_stderr": 0.038808483010823944,
"acc_norm": 0.732824427480916,
"acc_norm_stderr": 0.038808483010823944
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.037494924487096966,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.037494924487096966
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243839
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7177914110429447,
"acc_stderr": 0.03536117886664742,
"acc_norm": 0.7177914110429447,
"acc_norm_stderr": 0.03536117886664742
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.48214285714285715,
"acc_stderr": 0.047427623612430116,
"acc_norm": 0.48214285714285715,
"acc_norm_stderr": 0.047427623612430116
},
"harness|hendrycksTest-management|5": {
"acc": 0.8252427184466019,
"acc_stderr": 0.037601780060266196,
"acc_norm": 0.8252427184466019,
"acc_norm_stderr": 0.037601780060266196
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8632478632478633,
"acc_stderr": 0.022509033937077805,
"acc_norm": 0.8632478632478633,
"acc_norm_stderr": 0.022509033937077805
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8122605363984674,
"acc_stderr": 0.01396439376989914,
"acc_norm": 0.8122605363984674,
"acc_norm_stderr": 0.01396439376989914
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6791907514450867,
"acc_stderr": 0.025131000233647893,
"acc_norm": 0.6791907514450867,
"acc_norm_stderr": 0.025131000233647893
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.38100558659217876,
"acc_stderr": 0.016242028834053627,
"acc_norm": 0.38100558659217876,
"acc_norm_stderr": 0.016242028834053627
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.696078431372549,
"acc_stderr": 0.026336613469046626,
"acc_norm": 0.696078431372549,
"acc_norm_stderr": 0.026336613469046626
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6752411575562701,
"acc_stderr": 0.026596782287697043,
"acc_norm": 0.6752411575562701,
"acc_norm_stderr": 0.026596782287697043
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7191358024691358,
"acc_stderr": 0.02500646975579921,
"acc_norm": 0.7191358024691358,
"acc_norm_stderr": 0.02500646975579921
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4432624113475177,
"acc_stderr": 0.029634838473766006,
"acc_norm": 0.4432624113475177,
"acc_norm_stderr": 0.029634838473766006
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4439374185136897,
"acc_stderr": 0.012689708167787684,
"acc_norm": 0.4439374185136897,
"acc_norm_stderr": 0.012689708167787684
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6544117647058824,
"acc_stderr": 0.02888819310398863,
"acc_norm": 0.6544117647058824,
"acc_norm_stderr": 0.02888819310398863
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6421568627450981,
"acc_stderr": 0.019393058402355435,
"acc_norm": 0.6421568627450981,
"acc_norm_stderr": 0.019393058402355435
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.710204081632653,
"acc_stderr": 0.029043088683304328,
"acc_norm": 0.710204081632653,
"acc_norm_stderr": 0.029043088683304328
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8407960199004975,
"acc_stderr": 0.02587064676616913,
"acc_norm": 0.8407960199004975,
"acc_norm_stderr": 0.02587064676616913
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835817,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835817
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8187134502923976,
"acc_stderr": 0.029547741687640038,
"acc_norm": 0.8187134502923976,
"acc_norm_stderr": 0.029547741687640038
},
"harness|truthfulqa:mc|0": {
"mc1": 0.44063647490820074,
"mc1_stderr": 0.01737969755543745,
"mc2": 0.5953808732777186,
"mc2_stderr": 0.015347393503467649
},
"harness|winogrande|5": {
"acc": 0.7797947908445146,
"acc_stderr": 0.011646276755089691
},
"harness|drop|3": {
"em": 0.3183724832214765,
"em_stderr": 0.004770687516057205,
"f1": 0.44000419463087526,
"f1_stderr": 0.00452137107601273
},
"harness|gsm8k|5": {
"acc": 0.19408642911296436,
"acc_stderr": 0.01089391830819241
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.6937188506126404,
-0.8709767460823059,
0.25153401494026184,
0.2353963851928711,
-0.17546570301055908,
-0.04844198003411293,
-0.01601380668580532,
-0.24097397923469543,
0.5789085030555725,
-0.08724884688854218,
-0.47637268900871277,
-0.7000318765640259,
-0.442110538482666,
0.194268658757... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_rinna__bilingual-gpt-neox-4b-instruction-sft_public | open-llm-leaderboard | 2023-11-18T15:28:40Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T15:28:40Z | 2023-11-18T15:27:51.000Z | 2023-11-18T15:27:51 | ---
pretty_name: Evaluation run of rinna/bilingual-gpt-neox-4b-instruction-sft
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [rinna/bilingual-gpt-neox-4b-instruction-sft](https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-sft)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rinna__bilingual-gpt-neox-4b-instruction-sft_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T15:24:39.768473](https://huggingface.co/datasets/open-llm-leaderboard/details_rinna__bilingual-gpt-neox-4b-instruction-sft_public/blob/main/results_2023-11-18T15-24-39.768473.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23492916714087597,\n\
\ \"acc_stderr\": 0.029949341295022296,\n \"acc_norm\": 0.2361411950711662,\n\
\ \"acc_norm_stderr\": 0.030737498591548063,\n \"mc1\": 0.2558139534883721,\n\
\ \"mc1_stderr\": 0.01527417621928336,\n \"mc2\": 0.4375927514815572,\n\
\ \"mc2_stderr\": 0.014730946208002945,\n \"em\": 0.0,\n \"\
em_stderr\": 0.0,\n \"f1\": 0.004410654362416105,\n \"f1_stderr\"\
: 0.0003037121224665498\n },\n \"harness|arc:challenge|25\": {\n \"\
acc\": 0.24829351535836178,\n \"acc_stderr\": 0.012624912868089758,\n \
\ \"acc_norm\": 0.28071672354948807,\n \"acc_norm_stderr\": 0.013131238126975574\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3824935271858196,\n\
\ \"acc_stderr\": 0.004850028813189974,\n \"acc_norm\": 0.4750049790878311,\n\
\ \"acc_norm_stderr\": 0.0049835427688535525\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \
\ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.18518518518518517,\n\
\ \"acc_stderr\": 0.03355677216313142,\n \"acc_norm\": 0.18518518518518517,\n\
\ \"acc_norm_stderr\": 0.03355677216313142\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123398,\n\
\ \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123398\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n\
\ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \
\ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.02528839450289137,\n\
\ \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.02528839450289137\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\
\ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n\
\ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036845,\n \
\ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\
\ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n\
\ \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \
\ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\
\ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\
\ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\
\ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n\
\ \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\
\ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\
\ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\
\ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.2413793103448276,\n \"acc_stderr\": 0.03565998174135302,\n\
\ \"acc_norm\": 0.2413793103448276,\n \"acc_norm_stderr\": 0.03565998174135302\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.20899470899470898,\n \"acc_stderr\": 0.02094048156533486,\n \"\
acc_norm\": 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533486\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2857142857142857,\n\
\ \"acc_stderr\": 0.04040610178208841,\n \"acc_norm\": 0.2857142857142857,\n\
\ \"acc_norm_stderr\": 0.04040610178208841\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \
\ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"\
acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938063,\n \"\
acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\
: 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\
\ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\
acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 0.027178752639044915\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n\
\ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.20256410256410257,\n \"acc_stderr\": 0.020377660970371372,\n\
\ \"acc_norm\": 0.20256410256410257,\n \"acc_norm_stderr\": 0.020377660970371372\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2111111111111111,\n \"acc_stderr\": 0.024882116857655075,\n \
\ \"acc_norm\": 0.2111111111111111,\n \"acc_norm_stderr\": 0.024882116857655075\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\
\ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.1986754966887417,\n \"acc_stderr\": 0.03257847384436776,\n \"\
acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\
acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"\
acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\
\ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\
\ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\
\ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\
\ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\
\ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\
acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\
\ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\
\ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\
\ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\
\ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\
\ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\
\ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2905982905982906,\n\
\ \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n\
\ \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\
\ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\
\ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\
\ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\
\ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\
\ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\
\ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\
\ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\
\ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\
\ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \
\ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\
\ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\
\ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\
\ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\
: 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\
: {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\
\ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\
\ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\
\ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\
\ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\
: {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\
\ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\
\ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\
\ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\
\ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\
: {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\
\ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\
\ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\
\ \"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n\
\ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\
: {\n \"mc1\": 0.2558139534883721,\n \"mc1_stderr\": 0.01527417621928336,\n\
\ \"mc2\": 0.4375927514815572,\n \"mc2_stderr\": 0.014730946208002945\n\
\ },\n \"harness|winogrande|5\": {\n \"acc\": 0.5232833464877664,\n\
\ \"acc_stderr\": 0.01403724130957364\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.0,\n \"em_stderr\": 0.0,\n \"f1\": 0.004410654362416105,\n\
\ \"f1_stderr\": 0.0003037121224665498\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```"
repo_url: https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-sft
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|drop|3_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-24-39.768473.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T15-24-39.768473.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- '**/details_harness|winogrande|5_2023-11-18T15-24-39.768473.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T15-24-39.768473.parquet'
- config_name: results
data_files:
- split: 2023_11_18T15_24_39.768473
path:
- results_2023-11-18T15-24-39.768473.parquet
- split: latest
path:
- results_2023-11-18T15-24-39.768473.parquet
---
# Dataset Card for Evaluation run of rinna/bilingual-gpt-neox-4b-instruction-sft
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-sft
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [rinna/bilingual-gpt-neox-4b-instruction-sft](https://huggingface.co/rinna/bilingual-gpt-neox-4b-instruction-sft) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_rinna__bilingual-gpt-neox-4b-instruction-sft_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T15:24:39.768473](https://huggingface.co/datasets/open-llm-leaderboard/details_rinna__bilingual-gpt-neox-4b-instruction-sft_public/blob/main/results_2023-11-18T15-24-39.768473.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.23492916714087597,
"acc_stderr": 0.029949341295022296,
"acc_norm": 0.2361411950711662,
"acc_norm_stderr": 0.030737498591548063,
"mc1": 0.2558139534883721,
"mc1_stderr": 0.01527417621928336,
"mc2": 0.4375927514815572,
"mc2_stderr": 0.014730946208002945,
"em": 0.0,
"em_stderr": 0.0,
"f1": 0.004410654362416105,
"f1_stderr": 0.0003037121224665498
},
"harness|arc:challenge|25": {
"acc": 0.24829351535836178,
"acc_stderr": 0.012624912868089758,
"acc_norm": 0.28071672354948807,
"acc_norm_stderr": 0.013131238126975574
},
"harness|hellaswag|10": {
"acc": 0.3824935271858196,
"acc_stderr": 0.004850028813189974,
"acc_norm": 0.4750049790878311,
"acc_norm_stderr": 0.0049835427688535525
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932268,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.18518518518518517,
"acc_stderr": 0.03355677216313142,
"acc_norm": 0.18518518518518517,
"acc_norm_stderr": 0.03355677216313142
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.17763157894736842,
"acc_stderr": 0.031103182383123398,
"acc_norm": 0.17763157894736842,
"acc_norm_stderr": 0.031103182383123398
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.21509433962264152,
"acc_stderr": 0.02528839450289137,
"acc_norm": 0.21509433962264152,
"acc_norm_stderr": 0.02528839450289137
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.2,
"acc_stderr": 0.04020151261036845,
"acc_norm": 0.2,
"acc_norm_stderr": 0.04020151261036845
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.0440844002276808,
"acc_norm": 0.26,
"acc_norm_stderr": 0.0440844002276808
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.20809248554913296,
"acc_stderr": 0.030952890217749874,
"acc_norm": 0.20809248554913296,
"acc_norm_stderr": 0.030952890217749874
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.21568627450980393,
"acc_stderr": 0.04092563958237654,
"acc_norm": 0.21568627450980393,
"acc_norm_stderr": 0.04092563958237654
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.28,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.28,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.26382978723404255,
"acc_stderr": 0.028809989854102973,
"acc_norm": 0.26382978723404255,
"acc_norm_stderr": 0.028809989854102973
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.039994238792813365,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.039994238792813365
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.2413793103448276,
"acc_stderr": 0.03565998174135302,
"acc_norm": 0.2413793103448276,
"acc_norm_stderr": 0.03565998174135302
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.20899470899470898,
"acc_stderr": 0.02094048156533486,
"acc_norm": 0.20899470899470898,
"acc_norm_stderr": 0.02094048156533486
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04040610178208841,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04040610178208841
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.18,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.18,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.1774193548387097,
"acc_stderr": 0.02173254068932927,
"acc_norm": 0.1774193548387097,
"acc_norm_stderr": 0.02173254068932927
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.15270935960591134,
"acc_stderr": 0.02530890453938063,
"acc_norm": 0.15270935960591134,
"acc_norm_stderr": 0.02530890453938063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.17676767676767677,
"acc_stderr": 0.027178752639044915,
"acc_norm": 0.17676767676767677,
"acc_norm_stderr": 0.027178752639044915
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.19689119170984457,
"acc_stderr": 0.028697873971860664,
"acc_norm": 0.19689119170984457,
"acc_norm_stderr": 0.028697873971860664
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.20256410256410257,
"acc_stderr": 0.020377660970371372,
"acc_norm": 0.20256410256410257,
"acc_norm_stderr": 0.020377660970371372
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2111111111111111,
"acc_stderr": 0.024882116857655075,
"acc_norm": 0.2111111111111111,
"acc_norm_stderr": 0.024882116857655075
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.21008403361344538,
"acc_stderr": 0.026461398717471874,
"acc_norm": 0.21008403361344538,
"acc_norm_stderr": 0.026461398717471874
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.1986754966887417,
"acc_stderr": 0.03257847384436776,
"acc_norm": 0.1986754966887417,
"acc_norm_stderr": 0.03257847384436776
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.1926605504587156,
"acc_stderr": 0.016909276884936094,
"acc_norm": 0.1926605504587156,
"acc_norm_stderr": 0.016909276884936094
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.1527777777777778,
"acc_stderr": 0.024536326026134224,
"acc_norm": 0.1527777777777778,
"acc_norm_stderr": 0.024536326026134224
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.25,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.25,
"acc_norm_stderr": 0.03039153369274154
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.270042194092827,
"acc_stderr": 0.028900721906293426,
"acc_norm": 0.270042194092827,
"acc_norm_stderr": 0.028900721906293426
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.31390134529147984,
"acc_stderr": 0.031146796482972465,
"acc_norm": 0.31390134529147984,
"acc_norm_stderr": 0.031146796482972465
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.2595419847328244,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.2595419847328244,
"acc_norm_stderr": 0.03844876139785271
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2396694214876033,
"acc_stderr": 0.03896878985070417,
"acc_norm": 0.2396694214876033,
"acc_norm_stderr": 0.03896878985070417
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.042365112580946336,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.042365112580946336
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.22085889570552147,
"acc_stderr": 0.032591773927421776,
"acc_norm": 0.22085889570552147,
"acc_norm_stderr": 0.032591773927421776
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.3125,
"acc_stderr": 0.043994650575715215,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"harness|hendrycksTest-management|5": {
"acc": 0.17475728155339806,
"acc_stderr": 0.037601780060266224,
"acc_norm": 0.17475728155339806,
"acc_norm_stderr": 0.037601780060266224
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.2905982905982906,
"acc_stderr": 0.02974504857267404,
"acc_norm": 0.2905982905982906,
"acc_norm_stderr": 0.02974504857267404
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.23754789272030652,
"acc_stderr": 0.015218733046150193,
"acc_norm": 0.23754789272030652,
"acc_norm_stderr": 0.015218733046150193
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.023267528432100174,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.023267528432100174
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.23798882681564246,
"acc_stderr": 0.014242630070574915,
"acc_norm": 0.23798882681564246,
"acc_norm_stderr": 0.014242630070574915
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.023929155517351284,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.023929155517351284
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.1864951768488746,
"acc_stderr": 0.02212243977248077,
"acc_norm": 0.1864951768488746,
"acc_norm_stderr": 0.02212243977248077
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.21604938271604937,
"acc_stderr": 0.022899162918445806,
"acc_norm": 0.21604938271604937,
"acc_norm_stderr": 0.022899162918445806
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432417,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432417
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2457627118644068,
"acc_stderr": 0.010996156635142692,
"acc_norm": 0.2457627118644068,
"acc_norm_stderr": 0.010996156635142692
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.18382352941176472,
"acc_stderr": 0.023529242185193106,
"acc_norm": 0.18382352941176472,
"acc_norm_stderr": 0.023529242185193106
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.25,
"acc_stderr": 0.01751781884501444,
"acc_norm": 0.25,
"acc_norm_stderr": 0.01751781884501444
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.21818181818181817,
"acc_stderr": 0.03955932861795833,
"acc_norm": 0.21818181818181817,
"acc_norm_stderr": 0.03955932861795833
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.18775510204081633,
"acc_stderr": 0.02500025603954621,
"acc_norm": 0.18775510204081633,
"acc_norm_stderr": 0.02500025603954621
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.24378109452736318,
"acc_stderr": 0.03036049015401465,
"acc_norm": 0.24378109452736318,
"acc_norm_stderr": 0.03036049015401465
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-virology|5": {
"acc": 0.28313253012048195,
"acc_stderr": 0.03507295431370518,
"acc_norm": 0.28313253012048195,
"acc_norm_stderr": 0.03507295431370518
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.3216374269005848,
"acc_stderr": 0.03582529442573122,
"acc_norm": 0.3216374269005848,
"acc_norm_stderr": 0.03582529442573122
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2558139534883721,
"mc1_stderr": 0.01527417621928336,
"mc2": 0.4375927514815572,
"mc2_stderr": 0.014730946208002945
},
"harness|winogrande|5": {
"acc": 0.5232833464877664,
"acc_stderr": 0.01403724130957364
},
"harness|drop|3": {
"em": 0.0,
"em_stderr": 0.0,
"f1": 0.004410654362416105,
"f1_stderr": 0.0003037121224665498
},
"harness|gsm8k|5": {
"acc": 0.0,
"acc_stderr": 0.0
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7042877078056335,
-0.8791098594665527,
0.3003915846347809,
0.2514384984970093,
-0.16114166378974915,
-0.03399147465825081,
0.023075811564922333,
-0.23326092958450317,
0.5867094993591309,
-0.0070138899609446526,
-0.49128735065460205,
-0.6905661821365356,
-0.4407522678375244,
0.2794795632... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
HuyenNguyen/common_voice_13_0_hi_pseudo_labelled | HuyenNguyen | 2023-11-18T16:03:24Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T16:03:24Z | 2023-11-18T16:03:24.000Z | 2023-11-18T16:03:24 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Modest0/vozes | Modest0 | 2023-11-18T16:15:13Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T16:15:13Z | 2023-11-18T16:04:15.000Z | 2023-11-18T16:04:15 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_openchat__openchat_3.5_public | open-llm-leaderboard | 2023-11-19T10:34:09Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T10:34:09Z | 2023-11-18T16:18:03.000Z | 2023-11-18T16:18:03 | ---
pretty_name: Evaluation run of openchat/openchat_3.5
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [openchat/openchat_3.5](https://huggingface.co/openchat/openchat_3.5) on the [Open\
\ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 3 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_openchat__openchat_3.5_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-19T10:30:18.054013](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat_3.5_public/blob/main/results_2023-11-19T10-30-18.054013.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6253361427748827,\n\
\ \"acc_stderr\": 0.03243199538325514,\n \"acc_norm\": 0.6324168865850391,\n\
\ \"acc_norm_stderr\": 0.033117338974973515,\n \"mc1\": 0.3023255813953488,\n\
\ \"mc1_stderr\": 0.016077509266133036,\n \"mc2\": 0.4543017595862846,\n\
\ \"mc2_stderr\": 0.015109332514210328,\n \"em\": 0.0026216442953020135,\n\
\ \"em_stderr\": 0.0005236685642965895,\n \"f1\": 0.0692680369127516,\n\
\ \"f1_stderr\": 0.0014684205896877763\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5836177474402731,\n \"acc_stderr\": 0.014405618279436174,\n\
\ \"acc_norm\": 0.6245733788395904,\n \"acc_norm_stderr\": 0.014150631435111728\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6450906193985262,\n\
\ \"acc_stderr\": 0.0047750796365670966,\n \"acc_norm\": 0.839573790081657,\n\
\ \"acc_norm_stderr\": 0.003662508272330902\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n\
\ \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n\
\ \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n\
\ \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n\
\ \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\"\
: 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"\
acc\": 0.690566037735849,\n \"acc_stderr\": 0.02845015479411864,\n \
\ \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.02845015479411864\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\
\ \"acc_stderr\": 0.03745554791462455,\n \"acc_norm\": 0.7222222222222222,\n\
\ \"acc_norm_stderr\": 0.03745554791462455\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \
\ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\"\
: 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n\
\ \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n\
\ \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n\
\ \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n\
\ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5276595744680851,\n \"acc_stderr\": 0.03263597118409769,\n\
\ \"acc_norm\": 0.5276595744680851,\n \"acc_norm_stderr\": 0.03263597118409769\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n\
\ \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n\
\ \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4021164021164021,\n \"acc_stderr\": 0.02525303255499769,\n \"\
acc_norm\": 0.4021164021164021,\n \"acc_norm_stderr\": 0.02525303255499769\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n\
\ \"acc_stderr\": 0.024685979286239963,\n \"acc_norm\": 0.7483870967741936,\n\
\ \"acc_norm_stderr\": 0.024685979286239963\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n\
\ \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\"\
: 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.032250781083062896,\n\
\ \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.032250781083062896\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7676767676767676,\n \"acc_stderr\": 0.03008862949021749,\n \"\
acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.03008862949021749\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.024639789097709447,\n\
\ \"acc_norm\": 0.8652849740932642,\n \"acc_norm_stderr\": 0.024639789097709447\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6307692307692307,\n \"acc_stderr\": 0.02446861524147893,\n \
\ \"acc_norm\": 0.6307692307692307,\n \"acc_norm_stderr\": 0.02446861524147893\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.37777777777777777,\n \"acc_stderr\": 0.02956070739246572,\n \
\ \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.02956070739246572\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6512605042016807,\n \"acc_stderr\": 0.030956636328566545,\n\
\ \"acc_norm\": 0.6512605042016807,\n \"acc_norm_stderr\": 0.030956636328566545\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.32450331125827814,\n \"acc_stderr\": 0.038227469376587525,\n \"\
acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.038227469376587525\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"\
acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.48148148148148145,\n \"acc_stderr\": 0.034076320938540516,\n \"\
acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.034076320938540516\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7843137254901961,\n \"acc_stderr\": 0.028867431449849316,\n \"\
acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.028867431449849316\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579647,\n \
\ \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579647\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\
\ \"acc_stderr\": 0.031024411740572206,\n \"acc_norm\": 0.6905829596412556,\n\
\ \"acc_norm_stderr\": 0.031024411740572206\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306085,\n\
\ \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306085\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7851239669421488,\n \"acc_stderr\": 0.03749492448709695,\n \"\
acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.03749492448709695\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n\
\ \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n\
\ \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.03462419931615623,\n\
\ \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.03462419931615623\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n\
\ \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \
\ \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n\
\ \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n\
\ \"acc_stderr\": 0.023636873317489274,\n \"acc_norm\": 0.8461538461538461,\n\
\ \"acc_norm_stderr\": 0.023636873317489274\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8071519795657727,\n\
\ \"acc_stderr\": 0.014108533515757431,\n \"acc_norm\": 0.8071519795657727,\n\
\ \"acc_norm_stderr\": 0.014108533515757431\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.024547617794803828,\n\
\ \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.024547617794803828\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3486033519553073,\n\
\ \"acc_stderr\": 0.01593748465668703,\n \"acc_norm\": 0.3486033519553073,\n\
\ \"acc_norm_stderr\": 0.01593748465668703\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.026787453111906504,\n\
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.026787453111906504\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n\
\ \"acc_stderr\": 0.026160584450140446,\n \"acc_norm\": 0.6945337620578779,\n\
\ \"acc_norm_stderr\": 0.026160584450140446\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.02438366553103545,\n\
\ \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.02438366553103545\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \
\ \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4680573663624511,\n\
\ \"acc_stderr\": 0.012744149704869647,\n \"acc_norm\": 0.4680573663624511,\n\
\ \"acc_norm_stderr\": 0.012744149704869647\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.02873932851398357,\n\
\ \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.02873932851398357\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6405228758169934,\n \"acc_stderr\": 0.01941253924203216,\n \
\ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.01941253924203216\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.029393609319879804,\n\
\ \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.029393609319879804\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n\
\ \"acc_stderr\": 0.027686913588013014,\n \"acc_norm\": 0.8109452736318408,\n\
\ \"acc_norm_stderr\": 0.027686913588013014\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \
\ \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\
\ \"acc_stderr\": 0.03891364495835816,\n \"acc_norm\": 0.5120481927710844,\n\
\ \"acc_norm_stderr\": 0.03891364495835816\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n\
\ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3023255813953488,\n\
\ \"mc1_stderr\": 0.016077509266133036,\n \"mc2\": 0.4543017595862846,\n\
\ \"mc2_stderr\": 0.015109332514210328\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8105761641673244,\n \"acc_stderr\": 0.011012790432989243\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.0026216442953020135,\n \
\ \"em_stderr\": 0.0005236685642965895,\n \"f1\": 0.0692680369127516,\n\
\ \"f1_stderr\": 0.0014684205896877763\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.2577710386656558,\n \"acc_stderr\": 0.012048370213576602\n\
\ }\n}\n```"
repo_url: https://huggingface.co/openchat/openchat_3.5
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|arc:challenge|25_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|arc:challenge|25_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|arc:challenge|25_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|drop|3_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|drop|3_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|drop|3_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|gsm8k|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|gsm8k|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|gsm8k|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hellaswag|10_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hellaswag|10_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hellaswag|10_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T16-15-03.792286.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T16-22-29.903207.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-19T10-30-18.054013.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-19T10-30-18.054013.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- '**/details_harness|winogrande|5_2023-11-18T16-15-03.792286.parquet'
- split: 2023_11_18T16_22_29.903207
path:
- '**/details_harness|winogrande|5_2023-11-18T16-22-29.903207.parquet'
- split: 2023_11_19T10_30_18.054013
path:
- '**/details_harness|winogrande|5_2023-11-19T10-30-18.054013.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-19T10-30-18.054013.parquet'
- config_name: results
data_files:
- split: 2023_11_18T16_15_03.792286
path:
- results_2023-11-18T16-15-03.792286.parquet
- split: 2023_11_18T16_22_29.903207
path:
- results_2023-11-18T16-22-29.903207.parquet
- split: 2023_11_19T10_30_18.054013
path:
- results_2023-11-19T10-30-18.054013.parquet
- split: latest
path:
- results_2023-11-19T10-30-18.054013.parquet
---
# Dataset Card for Evaluation run of openchat/openchat_3.5
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/openchat/openchat_3.5
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [openchat/openchat_3.5](https://huggingface.co/openchat/openchat_3.5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_openchat__openchat_3.5_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-19T10:30:18.054013](https://huggingface.co/datasets/open-llm-leaderboard/details_openchat__openchat_3.5_public/blob/main/results_2023-11-19T10-30-18.054013.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6253361427748827,
"acc_stderr": 0.03243199538325514,
"acc_norm": 0.6324168865850391,
"acc_norm_stderr": 0.033117338974973515,
"mc1": 0.3023255813953488,
"mc1_stderr": 0.016077509266133036,
"mc2": 0.4543017595862846,
"mc2_stderr": 0.015109332514210328,
"em": 0.0026216442953020135,
"em_stderr": 0.0005236685642965895,
"f1": 0.0692680369127516,
"f1_stderr": 0.0014684205896877763
},
"harness|arc:challenge|25": {
"acc": 0.5836177474402731,
"acc_stderr": 0.014405618279436174,
"acc_norm": 0.6245733788395904,
"acc_norm_stderr": 0.014150631435111728
},
"harness|hellaswag|10": {
"acc": 0.6450906193985262,
"acc_stderr": 0.0047750796365670966,
"acc_norm": 0.839573790081657,
"acc_norm_stderr": 0.003662508272330902
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5851851851851851,
"acc_stderr": 0.04256193767901408,
"acc_norm": 0.5851851851851851,
"acc_norm_stderr": 0.04256193767901408
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6776315789473685,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.6776315789473685,
"acc_norm_stderr": 0.03803510248351585
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.690566037735849,
"acc_stderr": 0.02845015479411864,
"acc_norm": 0.690566037735849,
"acc_norm_stderr": 0.02845015479411864
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.03745554791462455,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.03745554791462455
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6473988439306358,
"acc_stderr": 0.036430371689585475,
"acc_norm": 0.6473988439306358,
"acc_norm_stderr": 0.036430371689585475
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.04897104952726366,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.04897104952726366
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5276595744680851,
"acc_stderr": 0.03263597118409769,
"acc_norm": 0.5276595744680851,
"acc_norm_stderr": 0.03263597118409769
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.04677473004491199,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.04677473004491199
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4021164021164021,
"acc_stderr": 0.02525303255499769,
"acc_norm": 0.4021164021164021,
"acc_norm_stderr": 0.02525303255499769
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5,
"acc_stderr": 0.04472135954999579,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04472135954999579
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7483870967741936,
"acc_stderr": 0.024685979286239963,
"acc_norm": 0.7483870967741936,
"acc_norm_stderr": 0.024685979286239963
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.49261083743842365,
"acc_stderr": 0.03517603540361008,
"acc_norm": 0.49261083743842365,
"acc_norm_stderr": 0.03517603540361008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.032250781083062896,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.032250781083062896
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7676767676767676,
"acc_stderr": 0.03008862949021749,
"acc_norm": 0.7676767676767676,
"acc_norm_stderr": 0.03008862949021749
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8652849740932642,
"acc_stderr": 0.024639789097709447,
"acc_norm": 0.8652849740932642,
"acc_norm_stderr": 0.024639789097709447
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6307692307692307,
"acc_stderr": 0.02446861524147893,
"acc_norm": 0.6307692307692307,
"acc_norm_stderr": 0.02446861524147893
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.02956070739246572,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.02956070739246572
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6512605042016807,
"acc_stderr": 0.030956636328566545,
"acc_norm": 0.6512605042016807,
"acc_norm_stderr": 0.030956636328566545
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.32450331125827814,
"acc_stderr": 0.038227469376587525,
"acc_norm": 0.32450331125827814,
"acc_norm_stderr": 0.038227469376587525
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.48148148148148145,
"acc_stderr": 0.034076320938540516,
"acc_norm": 0.48148148148148145,
"acc_norm_stderr": 0.034076320938540516
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7843137254901961,
"acc_stderr": 0.028867431449849316,
"acc_norm": 0.7843137254901961,
"acc_norm_stderr": 0.028867431449849316
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8185654008438819,
"acc_stderr": 0.025085961144579647,
"acc_norm": 0.8185654008438819,
"acc_norm_stderr": 0.025085961144579647
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.031024411740572206,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.031024411740572206
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7480916030534351,
"acc_stderr": 0.03807387116306085,
"acc_norm": 0.7480916030534351,
"acc_norm_stderr": 0.03807387116306085
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7851239669421488,
"acc_stderr": 0.03749492448709695,
"acc_norm": 0.7851239669421488,
"acc_norm_stderr": 0.03749492448709695
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7361963190184049,
"acc_stderr": 0.03462419931615623,
"acc_norm": 0.7361963190184049,
"acc_norm_stderr": 0.03462419931615623
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.8252427184466019,
"acc_stderr": 0.03760178006026621,
"acc_norm": 0.8252427184466019,
"acc_norm_stderr": 0.03760178006026621
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8461538461538461,
"acc_stderr": 0.023636873317489274,
"acc_norm": 0.8461538461538461,
"acc_norm_stderr": 0.023636873317489274
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8071519795657727,
"acc_stderr": 0.014108533515757431,
"acc_norm": 0.8071519795657727,
"acc_norm_stderr": 0.014108533515757431
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7052023121387283,
"acc_stderr": 0.024547617794803828,
"acc_norm": 0.7052023121387283,
"acc_norm_stderr": 0.024547617794803828
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3486033519553073,
"acc_stderr": 0.01593748465668703,
"acc_norm": 0.3486033519553073,
"acc_norm_stderr": 0.01593748465668703
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.026787453111906504,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.026787453111906504
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6945337620578779,
"acc_stderr": 0.026160584450140446,
"acc_norm": 0.6945337620578779,
"acc_norm_stderr": 0.026160584450140446
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.02438366553103545,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.02438366553103545
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4929078014184397,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.4929078014184397,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4680573663624511,
"acc_stderr": 0.012744149704869647,
"acc_norm": 0.4680573663624511,
"acc_norm_stderr": 0.012744149704869647
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6617647058823529,
"acc_stderr": 0.02873932851398357,
"acc_norm": 0.6617647058823529,
"acc_norm_stderr": 0.02873932851398357
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6405228758169934,
"acc_stderr": 0.01941253924203216,
"acc_norm": 0.6405228758169934,
"acc_norm_stderr": 0.01941253924203216
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6979591836734694,
"acc_stderr": 0.029393609319879804,
"acc_norm": 0.6979591836734694,
"acc_norm_stderr": 0.029393609319879804
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8109452736318408,
"acc_stderr": 0.027686913588013014,
"acc_norm": 0.8109452736318408,
"acc_norm_stderr": 0.027686913588013014
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.9,
"acc_stderr": 0.030151134457776334,
"acc_norm": 0.9,
"acc_norm_stderr": 0.030151134457776334
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835816,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835816
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727665,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727665
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3023255813953488,
"mc1_stderr": 0.016077509266133036,
"mc2": 0.4543017595862846,
"mc2_stderr": 0.015109332514210328
},
"harness|winogrande|5": {
"acc": 0.8105761641673244,
"acc_stderr": 0.011012790432989243
},
"harness|drop|3": {
"em": 0.0026216442953020135,
"em_stderr": 0.0005236685642965895,
"f1": 0.0692680369127516,
"f1_stderr": 0.0014684205896877763
},
"harness|gsm8k|5": {
"acc": 0.2577710386656558,
"acc_stderr": 0.012048370213576602
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7132333517074585,
-0.8945950269699097,
0.2544645667076111,
0.2280852049589157,
-0.17217376828193665,
-0.07494383305311203,
-0.030626924708485603,
-0.23850595951080322,
0.5705156922340393,
-0.041431646794080734,
-0.4749257266521454,
-0.7151786684989929,
-0.38420844078063965,
0.1940735131... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
rodrfons/fingpt_chatglm2_sentiment_instruction_lora_ft_dataset | rodrfons | 2023-11-18T16:36:22Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T16:36:22Z | 2023-11-18T16:36:19.000Z | 2023-11-18T16:36:19 | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
- name: instruction
dtype: string
splits:
- name: train
num_bytes: 18540941.869938433
num_examples: 76772
download_size: 6417302
dataset_size: 18540941.869938433
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "fingpt_chatglm2_sentiment_instruction_lora_ft_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | [
-0.5949644446372986,
-0.6748525500297546,
0.015731249004602432,
0.3558419346809387,
-0.23906747996807098,
0.008834334090352058,
0.06814907491207123,
-0.05067850276827812,
0.6695244312286377,
0.5813659429550171,
-0.907252848148346,
-0.8475595712661743,
-0.617760419845581,
-0.638087868690490... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
scrapalot/Techie_Filtered_Meta | scrapalot | 2023-11-18T17:21:18Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T17:21:18Z | 2023-11-18T17:09:39.000Z | 2023-11-18T17:09:39 | ---
dataset_info:
features:
- name: lens_id
dtype: string
- name: title
dtype: string
- name: publication_type
dtype: string
- name: year_published
dtype: float32
- name: date_published
dtype: string
- name: date_published_parts
sequence: int64
- name: created
dtype: string
- name: external_ids
list:
- name: type
dtype: string
- name: value
dtype: string
- name: open_access
struct:
- name: colour
dtype: string
- name: license
dtype: string
- name: authors
list:
- name: affiliations
list:
- name: country_code
dtype: string
- name: grid_id
dtype: string
- name: ids
list:
- name: type
dtype: string
- name: value
dtype: string
- name: name
dtype: string
- name: name_original
dtype: string
- name: collective_name
dtype: string
- name: first_name
dtype: string
- name: ids
list:
- name: type
dtype: string
- name: value
dtype: string
- name: initials
dtype: string
- name: last_name
dtype: string
- name: source
struct:
- name: asjc_codes
sequence: string
- name: asjc_subjects
sequence: string
- name: country
dtype: string
- name: issn
list:
- name: type
dtype: string
- name: value
dtype: string
- name: publisher
dtype: string
- name: title
dtype: string
- name: type
dtype: string
- name: fields_of_study
sequence: string
- name: languages
sequence: string
- name: start_page
dtype: string
- name: end_page
dtype: string
- name: author_count
dtype: float64
- name: is_open_access
dtype: bool
- name: source_urls
list:
- name: type
dtype: string
- name: url
dtype: string
- name: abstract
dtype: string
- name: references
list:
- name: lens_id
dtype: string
- name: references_count
dtype: float64
- name: scholarly_citations_count
dtype: float64
- name: scholarly_citations
sequence: string
- name: patent_citations
list:
- name: lens_id
dtype: string
- name: patent_citations_count
dtype: float64
- name: issue
dtype: string
- name: publication_supplementary_type
sequence: string
- name: volume
dtype: string
- name: conference
struct:
- name: instance
dtype: string
- name: location
dtype: string
- name: name
dtype: string
- name: mesh_terms
list:
- name: mesh_heading
dtype: string
- name: mesh_id
dtype: string
- name: qualifier_id
dtype: string
- name: qualifier_name
dtype: string
- name: chemicals
list:
- name: mesh_id
dtype: string
- name: registry_number
dtype: string
- name: substance_name
dtype: string
- name: keywords
sequence: string
- name: funding
list:
- name: country
dtype: string
- name: funding_id
dtype: string
- name: org
dtype: string
- name: clinical_trials
list:
- name: id
dtype: string
- name: registry
dtype: string
- name: pdf_urls
sequence: string
- name: domain
sequence: string
splits:
- name: train
num_bytes: 726171677.8835785
num_examples: 423860
download_size: 619298847
dataset_size: 726171677.8835785
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Richarth/me-llama | Richarth | 2023-11-18T17:14:36Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T17:14:36Z | 2023-11-18T17:14:36.000Z | 2023-11-18T17:14:36 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_ajibawa-2023__Python-Code-13B_public | open-llm-leaderboard | 2023-11-18T17:51:03Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T17:51:03Z | 2023-11-18T17:50:14.000Z | 2023-11-18T17:50:14 | ---
pretty_name: Evaluation run of ajibawa-2023/Python-Code-13B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ajibawa-2023/Python-Code-13B](https://huggingface.co/ajibawa-2023/Python-Code-13B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ajibawa-2023__Python-Code-13B_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T17:47:08.897776](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Python-Code-13B_public/blob/main/results_2023-11-18T17-47-08.897776.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5448646670706938,\n\
\ \"acc_stderr\": 0.03364449787116653,\n \"acc_norm\": 0.553017962738193,\n\
\ \"acc_norm_stderr\": 0.03441995141099888,\n \"mc1\": 0.3108935128518972,\n\
\ \"mc1_stderr\": 0.016203316673559696,\n \"mc2\": 0.42826268252930766,\n\
\ \"mc2_stderr\": 0.015905372852037223,\n \"em\": 0.02149748322147651,\n\
\ \"em_stderr\": 0.001485300865621995,\n \"f1\": 0.08503041107382545,\n\
\ \"f1_stderr\": 0.0019611908757143598\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5469283276450512,\n \"acc_stderr\": 0.014546892052005628,\n\
\ \"acc_norm\": 0.5878839590443686,\n \"acc_norm_stderr\": 0.014383915302225405\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6303525194184425,\n\
\ \"acc_stderr\": 0.00481722729224028,\n \"acc_norm\": 0.8165704043019318,\n\
\ \"acc_norm_stderr\": 0.0038622736265045477\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n\
\ \"acc_stderr\": 0.04299268905480863,\n \"acc_norm\": 0.45185185185185184,\n\
\ \"acc_norm_stderr\": 0.04299268905480863\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.5131578947368421,\n \"acc_stderr\": 0.04067533136309173,\n\
\ \"acc_norm\": 0.5131578947368421,\n \"acc_norm_stderr\": 0.04067533136309173\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6037735849056604,\n \"acc_stderr\": 0.030102793781791197,\n\
\ \"acc_norm\": 0.6037735849056604,\n \"acc_norm_stderr\": 0.030102793781791197\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5833333333333334,\n\
\ \"acc_stderr\": 0.04122728707651282,\n \"acc_norm\": 0.5833333333333334,\n\
\ \"acc_norm_stderr\": 0.04122728707651282\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\"\
: 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n\
\ \"acc_stderr\": 0.03807301726504513,\n \"acc_norm\": 0.5260115606936416,\n\
\ \"acc_norm_stderr\": 0.03807301726504513\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808778,\n\
\ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808778\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n\
\ \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\
\ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.30701754385964913,\n\
\ \"acc_stderr\": 0.04339138322579861,\n \"acc_norm\": 0.30701754385964913,\n\
\ \"acc_norm_stderr\": 0.04339138322579861\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.4689655172413793,\n \"acc_stderr\": 0.04158632762097828,\n\
\ \"acc_norm\": 0.4689655172413793,\n \"acc_norm_stderr\": 0.04158632762097828\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3253968253968254,\n \"acc_stderr\": 0.024130158299762613,\n \"\
acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.024130158299762613\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\
\ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\
\ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\
\ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6290322580645161,\n\
\ \"acc_stderr\": 0.027480541887953593,\n \"acc_norm\": 0.6290322580645161,\n\
\ \"acc_norm_stderr\": 0.027480541887953593\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.03481904844438804,\n\
\ \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.03481904844438804\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\"\
: 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\
\ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.6818181818181818,\n \"acc_stderr\": 0.0331847733384533,\n \"acc_norm\"\
: 0.6818181818181818,\n \"acc_norm_stderr\": 0.0331847733384533\n },\n\
\ \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \
\ \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.028697873971860677,\n\
\ \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.028697873971860677\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.5102564102564102,\n \"acc_stderr\": 0.025345672221942374,\n\
\ \"acc_norm\": 0.5102564102564102,\n \"acc_norm_stderr\": 0.025345672221942374\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2962962962962963,\n \"acc_stderr\": 0.027840811495871923,\n \
\ \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.027840811495871923\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.542016806722689,\n \"acc_stderr\": 0.032363611119519416,\n \
\ \"acc_norm\": 0.542016806722689,\n \"acc_norm_stderr\": 0.032363611119519416\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"\
acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7321100917431193,\n \"acc_stderr\": 0.018987462257978652,\n \"\
acc_norm\": 0.7321100917431193,\n \"acc_norm_stderr\": 0.018987462257978652\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4074074074074074,\n \"acc_stderr\": 0.03350991604696042,\n \"\
acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.03350991604696042\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7549019607843137,\n \"acc_stderr\": 0.030190282453501947,\n \"\
acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.030190282453501947\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7257383966244726,\n \"acc_stderr\": 0.029041333510598014,\n \
\ \"acc_norm\": 0.7257383966244726,\n \"acc_norm_stderr\": 0.029041333510598014\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\
\ \"acc_stderr\": 0.03191100192835795,\n \"acc_norm\": 0.6547085201793722,\n\
\ \"acc_norm_stderr\": 0.03191100192835795\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\
\ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.71900826446281,\n \"acc_stderr\": 0.041032038305145124,\n \"acc_norm\"\
: 0.71900826446281,\n \"acc_norm_stderr\": 0.041032038305145124\n },\n\
\ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n\
\ \"acc_stderr\": 0.04453197507374983,\n \"acc_norm\": 0.6944444444444444,\n\
\ \"acc_norm_stderr\": 0.04453197507374983\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.656441717791411,\n \"acc_stderr\": 0.037311335196738925,\n\
\ \"acc_norm\": 0.656441717791411,\n \"acc_norm_stderr\": 0.037311335196738925\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n\
\ \"acc_stderr\": 0.04287858751340456,\n \"acc_norm\": 0.2857142857142857,\n\
\ \"acc_norm_stderr\": 0.04287858751340456\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n\
\ \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7948717948717948,\n\
\ \"acc_stderr\": 0.026453508054040332,\n \"acc_norm\": 0.7948717948717948,\n\
\ \"acc_norm_stderr\": 0.026453508054040332\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \
\ \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7458492975734355,\n\
\ \"acc_stderr\": 0.015569254692045757,\n \"acc_norm\": 0.7458492975734355,\n\
\ \"acc_norm_stderr\": 0.015569254692045757\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.02607431485165708,\n\
\ \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.02607431485165708\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.376536312849162,\n\
\ \"acc_stderr\": 0.016204672385106606,\n \"acc_norm\": 0.376536312849162,\n\
\ \"acc_norm_stderr\": 0.016204672385106606\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.027582811415159607,\n\
\ \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.027582811415159607\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6237942122186495,\n\
\ \"acc_stderr\": 0.02751392568354943,\n \"acc_norm\": 0.6237942122186495,\n\
\ \"acc_norm_stderr\": 0.02751392568354943\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6049382716049383,\n \"acc_stderr\": 0.027201117666925654,\n\
\ \"acc_norm\": 0.6049382716049383,\n \"acc_norm_stderr\": 0.027201117666925654\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.3900709219858156,\n \"acc_stderr\": 0.02909767559946393,\n \
\ \"acc_norm\": 0.3900709219858156,\n \"acc_norm_stderr\": 0.02909767559946393\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41851368970013036,\n\
\ \"acc_stderr\": 0.012599505608336456,\n \"acc_norm\": 0.41851368970013036,\n\
\ \"acc_norm_stderr\": 0.012599505608336456\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.030332578094555033,\n\
\ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.030332578094555033\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.5718954248366013,\n \"acc_stderr\": 0.020017629214213097,\n \
\ \"acc_norm\": 0.5718954248366013,\n \"acc_norm_stderr\": 0.020017629214213097\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5909090909090909,\n\
\ \"acc_stderr\": 0.04709306978661896,\n \"acc_norm\": 0.5909090909090909,\n\
\ \"acc_norm_stderr\": 0.04709306978661896\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6326530612244898,\n \"acc_stderr\": 0.030862144921087555,\n\
\ \"acc_norm\": 0.6326530612244898,\n \"acc_norm_stderr\": 0.030862144921087555\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n\
\ \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.7611940298507462,\n\
\ \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \
\ \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n\
\ \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n\
\ \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.031885780176863984,\n\
\ \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.031885780176863984\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3108935128518972,\n\
\ \"mc1_stderr\": 0.016203316673559696,\n \"mc2\": 0.42826268252930766,\n\
\ \"mc2_stderr\": 0.015905372852037223\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552667\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.02149748322147651,\n \
\ \"em_stderr\": 0.001485300865621995,\n \"f1\": 0.08503041107382545,\n\
\ \"f1_stderr\": 0.0019611908757143598\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.09552691432903715,\n \"acc_stderr\": 0.008096605771155745\n\
\ }\n}\n```"
repo_url: https://huggingface.co/ajibawa-2023/Python-Code-13B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|arc:challenge|25_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|drop|3_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|gsm8k|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hellaswag|10_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T17-47-08.897776.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T17-47-08.897776.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- '**/details_harness|winogrande|5_2023-11-18T17-47-08.897776.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T17-47-08.897776.parquet'
- config_name: results
data_files:
- split: 2023_11_18T17_47_08.897776
path:
- results_2023-11-18T17-47-08.897776.parquet
- split: latest
path:
- results_2023-11-18T17-47-08.897776.parquet
---
# Dataset Card for Evaluation run of ajibawa-2023/Python-Code-13B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/ajibawa-2023/Python-Code-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [ajibawa-2023/Python-Code-13B](https://huggingface.co/ajibawa-2023/Python-Code-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ajibawa-2023__Python-Code-13B_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T17:47:08.897776](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Python-Code-13B_public/blob/main/results_2023-11-18T17-47-08.897776.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5448646670706938,
"acc_stderr": 0.03364449787116653,
"acc_norm": 0.553017962738193,
"acc_norm_stderr": 0.03441995141099888,
"mc1": 0.3108935128518972,
"mc1_stderr": 0.016203316673559696,
"mc2": 0.42826268252930766,
"mc2_stderr": 0.015905372852037223,
"em": 0.02149748322147651,
"em_stderr": 0.001485300865621995,
"f1": 0.08503041107382545,
"f1_stderr": 0.0019611908757143598
},
"harness|arc:challenge|25": {
"acc": 0.5469283276450512,
"acc_stderr": 0.014546892052005628,
"acc_norm": 0.5878839590443686,
"acc_norm_stderr": 0.014383915302225405
},
"harness|hellaswag|10": {
"acc": 0.6303525194184425,
"acc_stderr": 0.00481722729224028,
"acc_norm": 0.8165704043019318,
"acc_norm_stderr": 0.0038622736265045477
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.45185185185185184,
"acc_stderr": 0.04299268905480863,
"acc_norm": 0.45185185185185184,
"acc_norm_stderr": 0.04299268905480863
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.5131578947368421,
"acc_stderr": 0.04067533136309173,
"acc_norm": 0.5131578947368421,
"acc_norm_stderr": 0.04067533136309173
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6037735849056604,
"acc_stderr": 0.030102793781791197,
"acc_norm": 0.6037735849056604,
"acc_norm_stderr": 0.030102793781791197
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.04122728707651282,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.04122728707651282
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5260115606936416,
"acc_stderr": 0.03807301726504513,
"acc_norm": 0.5260115606936416,
"acc_norm_stderr": 0.03807301726504513
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808778
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.425531914893617,
"acc_stderr": 0.03232146916224468,
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.03232146916224468
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.30701754385964913,
"acc_stderr": 0.04339138322579861,
"acc_norm": 0.30701754385964913,
"acc_norm_stderr": 0.04339138322579861
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.4689655172413793,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.4689655172413793,
"acc_norm_stderr": 0.04158632762097828
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.024130158299762613,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.024130158299762613
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.04163453031302859
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6290322580645161,
"acc_stderr": 0.027480541887953593,
"acc_norm": 0.6290322580645161,
"acc_norm_stderr": 0.027480541887953593
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.03481904844438804,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.03481904844438804
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.53,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.6484848484848484,
"acc_stderr": 0.037282069986826503,
"acc_norm": 0.6484848484848484,
"acc_norm_stderr": 0.037282069986826503
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6818181818181818,
"acc_stderr": 0.0331847733384533,
"acc_norm": 0.6818181818181818,
"acc_norm_stderr": 0.0331847733384533
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8031088082901554,
"acc_stderr": 0.028697873971860677,
"acc_norm": 0.8031088082901554,
"acc_norm_stderr": 0.028697873971860677
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5102564102564102,
"acc_stderr": 0.025345672221942374,
"acc_norm": 0.5102564102564102,
"acc_norm_stderr": 0.025345672221942374
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.027840811495871923,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.027840811495871923
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.542016806722689,
"acc_stderr": 0.032363611119519416,
"acc_norm": 0.542016806722689,
"acc_norm_stderr": 0.032363611119519416
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.038020397601079024,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.038020397601079024
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7321100917431193,
"acc_stderr": 0.018987462257978652,
"acc_norm": 0.7321100917431193,
"acc_norm_stderr": 0.018987462257978652
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.03350991604696042,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.03350991604696042
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7549019607843137,
"acc_stderr": 0.030190282453501947,
"acc_norm": 0.7549019607843137,
"acc_norm_stderr": 0.030190282453501947
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7257383966244726,
"acc_stderr": 0.029041333510598014,
"acc_norm": 0.7257383966244726,
"acc_norm_stderr": 0.029041333510598014
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6547085201793722,
"acc_stderr": 0.03191100192835795,
"acc_norm": 0.6547085201793722,
"acc_norm_stderr": 0.03191100192835795
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6259541984732825,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.6259541984732825,
"acc_norm_stderr": 0.042438692422305246
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.71900826446281,
"acc_stderr": 0.041032038305145124,
"acc_norm": 0.71900826446281,
"acc_norm_stderr": 0.041032038305145124
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.04453197507374983,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.04453197507374983
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.656441717791411,
"acc_stderr": 0.037311335196738925,
"acc_norm": 0.656441717791411,
"acc_norm_stderr": 0.037311335196738925
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.04287858751340456,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.04287858751340456
},
"harness|hendrycksTest-management|5": {
"acc": 0.7475728155339806,
"acc_stderr": 0.04301250399690878,
"acc_norm": 0.7475728155339806,
"acc_norm_stderr": 0.04301250399690878
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.7948717948717948,
"acc_stderr": 0.026453508054040332,
"acc_norm": 0.7948717948717948,
"acc_norm_stderr": 0.026453508054040332
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7458492975734355,
"acc_stderr": 0.015569254692045757,
"acc_norm": 0.7458492975734355,
"acc_norm_stderr": 0.015569254692045757
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6242774566473989,
"acc_stderr": 0.02607431485165708,
"acc_norm": 0.6242774566473989,
"acc_norm_stderr": 0.02607431485165708
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.376536312849162,
"acc_stderr": 0.016204672385106606,
"acc_norm": 0.376536312849162,
"acc_norm_stderr": 0.016204672385106606
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6339869281045751,
"acc_stderr": 0.027582811415159607,
"acc_norm": 0.6339869281045751,
"acc_norm_stderr": 0.027582811415159607
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6237942122186495,
"acc_stderr": 0.02751392568354943,
"acc_norm": 0.6237942122186495,
"acc_norm_stderr": 0.02751392568354943
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6049382716049383,
"acc_stderr": 0.027201117666925654,
"acc_norm": 0.6049382716049383,
"acc_norm_stderr": 0.027201117666925654
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.3900709219858156,
"acc_stderr": 0.02909767559946393,
"acc_norm": 0.3900709219858156,
"acc_norm_stderr": 0.02909767559946393
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.41851368970013036,
"acc_stderr": 0.012599505608336456,
"acc_norm": 0.41851368970013036,
"acc_norm_stderr": 0.012599505608336456
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5257352941176471,
"acc_stderr": 0.030332578094555033,
"acc_norm": 0.5257352941176471,
"acc_norm_stderr": 0.030332578094555033
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5718954248366013,
"acc_stderr": 0.020017629214213097,
"acc_norm": 0.5718954248366013,
"acc_norm_stderr": 0.020017629214213097
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.5909090909090909,
"acc_stderr": 0.04709306978661896,
"acc_norm": 0.5909090909090909,
"acc_norm_stderr": 0.04709306978661896
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6326530612244898,
"acc_stderr": 0.030862144921087555,
"acc_norm": 0.6326530612244898,
"acc_norm_stderr": 0.030862144921087555
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7611940298507462,
"acc_stderr": 0.030147775935409217,
"acc_norm": 0.7611940298507462,
"acc_norm_stderr": 0.030147775935409217
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.86,
"acc_stderr": 0.03487350880197769,
"acc_norm": 0.86,
"acc_norm_stderr": 0.03487350880197769
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4819277108433735,
"acc_stderr": 0.038899512528272166,
"acc_norm": 0.4819277108433735,
"acc_norm_stderr": 0.038899512528272166
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.031885780176863984,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.031885780176863984
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3108935128518972,
"mc1_stderr": 0.016203316673559696,
"mc2": 0.42826268252930766,
"mc2_stderr": 0.015905372852037223
},
"harness|winogrande|5": {
"acc": 0.7403314917127072,
"acc_stderr": 0.012322700705552667
},
"harness|drop|3": {
"em": 0.02149748322147651,
"em_stderr": 0.001485300865621995,
"f1": 0.08503041107382545,
"f1_stderr": 0.0019611908757143598
},
"harness|gsm8k|5": {
"acc": 0.09552691432903715,
"acc_stderr": 0.008096605771155745
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7417489290237427,
-0.8677587509155273,
0.2688010334968567,
0.2470759153366089,
-0.15433338284492493,
-0.08705633133649826,
0.0018880238058045506,
-0.24560546875,
0.5675190091133118,
-0.05197510868310928,
-0.4973575472831726,
-0.6884101033210754,
-0.4482797682285309,
0.25404754281044006,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
sohaibchachar/Drone | sohaibchachar | 2023-11-19T22:55:59Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T22:55:59Z | 2023-11-18T17:58:22.000Z | 2023-11-18T17:58:22 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
MoonIcee/joao | MoonIcee | 2023-11-18T17:59:10Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T17:59:10Z | 2023-11-18T17:59:10.000Z | 2023-11-18T17:59:10 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-13B_public | open-llm-leaderboard | 2023-11-18T18:05:16Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T18:05:16Z | 2023-11-18T18:04:27.000Z | 2023-11-18T18:04:27 | ---
pretty_name: Evaluation run of ajibawa-2023/Uncensored-Jordan-13B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ajibawa-2023/Uncensored-Jordan-13B](https://huggingface.co/ajibawa-2023/Uncensored-Jordan-13B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-13B_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T18:01:22.350849](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-13B_public/blob/main/results_2023-11-18T18-01-22.350849.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5551346436733366,\n\
\ \"acc_stderr\": 0.033773935379363566,\n \"acc_norm\": 0.5623156588862028,\n\
\ \"acc_norm_stderr\": 0.03452935511879212,\n \"mc1\": 0.35006119951040393,\n\
\ \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5051246541228124,\n\
\ \"mc2_stderr\": 0.015683474268697605,\n \"em\": 0.10371224832214765,\n\
\ \"em_stderr\": 0.003122327158910168,\n \"f1\": 0.1647325922818787,\n\
\ \"f1_stderr\": 0.003269141000174996\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5401023890784983,\n \"acc_stderr\": 0.014564318856924848,\n\
\ \"acc_norm\": 0.5742320819112628,\n \"acc_norm_stderr\": 0.01444946427886881\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6352320254929297,\n\
\ \"acc_stderr\": 0.004803812631994952,\n \"acc_norm\": 0.8270264887472615,\n\
\ \"acc_norm_stderr\": 0.0037745138826159514\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\
\ \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n\
\ \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n\
\ \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n\
\ \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \
\ \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6075471698113207,\n \"acc_stderr\": 0.03005258057955785,\n\
\ \"acc_norm\": 0.6075471698113207,\n \"acc_norm_stderr\": 0.03005258057955785\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5833333333333334,\n\
\ \"acc_stderr\": 0.041227287076512825,\n \"acc_norm\": 0.5833333333333334,\n\
\ \"acc_norm_stderr\": 0.041227287076512825\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \
\ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\
\ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5317919075144508,\n\
\ \"acc_stderr\": 0.03804749744364764,\n \"acc_norm\": 0.5317919075144508,\n\
\ \"acc_norm_stderr\": 0.03804749744364764\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n\
\ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n\
\ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.032671518489247764,\n\
\ \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.032671518489247764\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n\
\ \"acc_stderr\": 0.03892431106518754,\n \"acc_norm\": 0.21929824561403508,\n\
\ \"acc_norm_stderr\": 0.03892431106518754\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.31746031746031744,\n \"acc_stderr\": 0.023973861998992065,\n \"\
acc_norm\": 0.31746031746031744,\n \"acc_norm_stderr\": 0.023973861998992065\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n\
\ \"acc_stderr\": 0.043255060420170854,\n \"acc_norm\": 0.373015873015873,\n\
\ \"acc_norm_stderr\": 0.043255060420170854\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\
\ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6516129032258065,\n\
\ \"acc_stderr\": 0.027104826328100944,\n \"acc_norm\": 0.6516129032258065,\n\
\ \"acc_norm_stderr\": 0.027104826328100944\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.41379310344827586,\n \"acc_stderr\": 0.03465304488406795,\n\
\ \"acc_norm\": 0.41379310344827586,\n \"acc_norm_stderr\": 0.03465304488406795\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\"\
: 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.03663974994391244,\n\
\ \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.03663974994391244\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.6919191919191919,\n \"acc_stderr\": 0.032894773300986155,\n \"\
acc_norm\": 0.6919191919191919,\n \"acc_norm_stderr\": 0.032894773300986155\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8031088082901554,\n \"acc_stderr\": 0.028697873971860688,\n\
\ \"acc_norm\": 0.8031088082901554,\n \"acc_norm_stderr\": 0.028697873971860688\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.4897435897435897,\n \"acc_stderr\": 0.025345672221942374,\n\
\ \"acc_norm\": 0.4897435897435897,\n \"acc_norm_stderr\": 0.025345672221942374\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3037037037037037,\n \"acc_stderr\": 0.02803792996911499,\n \
\ \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.02803792996911499\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n\
\ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"\
acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7394495412844037,\n \"acc_stderr\": 0.01881918203485007,\n \"\
acc_norm\": 0.7394495412844037,\n \"acc_norm_stderr\": 0.01881918203485007\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.37037037037037035,\n \"acc_stderr\": 0.03293377139415191,\n \"\
acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.03293377139415191\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967408,\n \"\
acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967408\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \
\ \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n\
\ \"acc_stderr\": 0.031602951437766785,\n \"acc_norm\": 0.6681614349775785,\n\
\ \"acc_norm_stderr\": 0.031602951437766785\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\
\ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"\
acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6388888888888888,\n\
\ \"acc_stderr\": 0.04643454608906275,\n \"acc_norm\": 0.6388888888888888,\n\
\ \"acc_norm_stderr\": 0.04643454608906275\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n\
\ \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\
\ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \
\ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.6699029126213593,\n \"acc_stderr\": 0.0465614711001235,\n\
\ \"acc_norm\": 0.6699029126213593,\n \"acc_norm_stderr\": 0.0465614711001235\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n\
\ \"acc_stderr\": 0.02416161812798774,\n \"acc_norm\": 0.8376068376068376,\n\
\ \"acc_norm_stderr\": 0.02416161812798774\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \
\ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\
\ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7573435504469987,\n\
\ \"acc_stderr\": 0.01532988894089986,\n \"acc_norm\": 0.7573435504469987,\n\
\ \"acc_norm_stderr\": 0.01532988894089986\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.02599247202930639,\n\
\ \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.02599247202930639\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41899441340782123,\n\
\ \"acc_stderr\": 0.016501579306861677,\n \"acc_norm\": 0.41899441340782123,\n\
\ \"acc_norm_stderr\": 0.016501579306861677\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.630718954248366,\n \"acc_stderr\": 0.02763417668960266,\n\
\ \"acc_norm\": 0.630718954248366,\n \"acc_norm_stderr\": 0.02763417668960266\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6366559485530546,\n\
\ \"acc_stderr\": 0.027316847674192707,\n \"acc_norm\": 0.6366559485530546,\n\
\ \"acc_norm_stderr\": 0.027316847674192707\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6419753086419753,\n \"acc_stderr\": 0.02667561192603709,\n\
\ \"acc_norm\": 0.6419753086419753,\n \"acc_norm_stderr\": 0.02667561192603709\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596136,\n \
\ \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596136\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4165580182529335,\n\
\ \"acc_stderr\": 0.012591153245057388,\n \"acc_norm\": 0.4165580182529335,\n\
\ \"acc_norm_stderr\": 0.012591153245057388\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5147058823529411,\n \"acc_stderr\": 0.03035969707904611,\n\
\ \"acc_norm\": 0.5147058823529411,\n \"acc_norm_stderr\": 0.03035969707904611\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.5784313725490197,\n \"acc_stderr\": 0.019977422600227474,\n \
\ \"acc_norm\": 0.5784313725490197,\n \"acc_norm_stderr\": 0.019977422600227474\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n\
\ \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n\
\ \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.031192230726795656,\n\
\ \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.031192230726795656\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7263681592039801,\n\
\ \"acc_stderr\": 0.03152439186555402,\n \"acc_norm\": 0.7263681592039801,\n\
\ \"acc_norm_stderr\": 0.03152439186555402\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \
\ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n\
\ \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n\
\ \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n\
\ \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35006119951040393,\n\
\ \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5051246541228124,\n\
\ \"mc2_stderr\": 0.015683474268697605\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7616416732438832,\n \"acc_stderr\": 0.011974948667702311\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.10371224832214765,\n \
\ \"em_stderr\": 0.003122327158910168,\n \"f1\": 0.1647325922818787,\n\
\ \"f1_stderr\": 0.003269141000174996\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.1508718726307809,\n \"acc_stderr\": 0.009859004137305687\n\
\ }\n}\n```"
repo_url: https://huggingface.co/ajibawa-2023/Uncensored-Jordan-13B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|arc:challenge|25_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|drop|3_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|gsm8k|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hellaswag|10_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-01-22.350849.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T18-01-22.350849.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- '**/details_harness|winogrande|5_2023-11-18T18-01-22.350849.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T18-01-22.350849.parquet'
- config_name: results
data_files:
- split: 2023_11_18T18_01_22.350849
path:
- results_2023-11-18T18-01-22.350849.parquet
- split: latest
path:
- results_2023-11-18T18-01-22.350849.parquet
---
# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Jordan-13B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/ajibawa-2023/Uncensored-Jordan-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [ajibawa-2023/Uncensored-Jordan-13B](https://huggingface.co/ajibawa-2023/Uncensored-Jordan-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-13B_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T18:01:22.350849](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-13B_public/blob/main/results_2023-11-18T18-01-22.350849.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5551346436733366,
"acc_stderr": 0.033773935379363566,
"acc_norm": 0.5623156588862028,
"acc_norm_stderr": 0.03452935511879212,
"mc1": 0.35006119951040393,
"mc1_stderr": 0.01669794942015103,
"mc2": 0.5051246541228124,
"mc2_stderr": 0.015683474268697605,
"em": 0.10371224832214765,
"em_stderr": 0.003122327158910168,
"f1": 0.1647325922818787,
"f1_stderr": 0.003269141000174996
},
"harness|arc:challenge|25": {
"acc": 0.5401023890784983,
"acc_stderr": 0.014564318856924848,
"acc_norm": 0.5742320819112628,
"acc_norm_stderr": 0.01444946427886881
},
"harness|hellaswag|10": {
"acc": 0.6352320254929297,
"acc_stderr": 0.004803812631994952,
"acc_norm": 0.8270264887472615,
"acc_norm_stderr": 0.0037745138826159514
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4740740740740741,
"acc_stderr": 0.04313531696750574,
"acc_norm": 0.4740740740740741,
"acc_norm_stderr": 0.04313531696750574
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.5657894736842105,
"acc_stderr": 0.04033565667848319,
"acc_norm": 0.5657894736842105,
"acc_norm_stderr": 0.04033565667848319
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6075471698113207,
"acc_stderr": 0.03005258057955785,
"acc_norm": 0.6075471698113207,
"acc_norm_stderr": 0.03005258057955785
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.5833333333333334,
"acc_stderr": 0.041227287076512825,
"acc_norm": 0.5833333333333334,
"acc_norm_stderr": 0.041227287076512825
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5317919075144508,
"acc_stderr": 0.03804749744364764,
"acc_norm": 0.5317919075144508,
"acc_norm_stderr": 0.03804749744364764
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201942,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201942
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4851063829787234,
"acc_stderr": 0.032671518489247764,
"acc_norm": 0.4851063829787234,
"acc_norm_stderr": 0.032671518489247764
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.21929824561403508,
"acc_stderr": 0.03892431106518754,
"acc_norm": 0.21929824561403508,
"acc_norm_stderr": 0.03892431106518754
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.023973861998992065,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.023973861998992065
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.043255060420170854,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.043255060420170854
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6516129032258065,
"acc_stderr": 0.027104826328100944,
"acc_norm": 0.6516129032258065,
"acc_norm_stderr": 0.027104826328100944
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.41379310344827586,
"acc_stderr": 0.03465304488406795,
"acc_norm": 0.41379310344827586,
"acc_norm_stderr": 0.03465304488406795
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.55,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.55,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.03663974994391244,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.03663974994391244
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6919191919191919,
"acc_stderr": 0.032894773300986155,
"acc_norm": 0.6919191919191919,
"acc_norm_stderr": 0.032894773300986155
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8031088082901554,
"acc_stderr": 0.028697873971860688,
"acc_norm": 0.8031088082901554,
"acc_norm_stderr": 0.028697873971860688
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.4897435897435897,
"acc_stderr": 0.025345672221942374,
"acc_norm": 0.4897435897435897,
"acc_norm_stderr": 0.025345672221942374
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.02803792996911499,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.02803792996911499
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5588235294117647,
"acc_stderr": 0.032252942323996406,
"acc_norm": 0.5588235294117647,
"acc_norm_stderr": 0.032252942323996406
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.037579499229433426,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.037579499229433426
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7394495412844037,
"acc_stderr": 0.01881918203485007,
"acc_norm": 0.7394495412844037,
"acc_norm_stderr": 0.01881918203485007
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7794117647058824,
"acc_stderr": 0.02910225438967408,
"acc_norm": 0.7794117647058824,
"acc_norm_stderr": 0.02910225438967408
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.729957805907173,
"acc_stderr": 0.028900721906293426,
"acc_norm": 0.729957805907173,
"acc_norm_stderr": 0.028900721906293426
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6681614349775785,
"acc_stderr": 0.031602951437766785,
"acc_norm": 0.6681614349775785,
"acc_norm_stderr": 0.031602951437766785
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6259541984732825,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.6259541984732825,
"acc_norm_stderr": 0.042438692422305246
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.04065578140908705,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.04065578140908705
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.04643454608906275,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.04643454608906275
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6687116564417178,
"acc_stderr": 0.03697983910025588,
"acc_norm": 0.6687116564417178,
"acc_norm_stderr": 0.03697983910025588
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.375,
"acc_stderr": 0.04595091388086298,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04595091388086298
},
"harness|hendrycksTest-management|5": {
"acc": 0.6699029126213593,
"acc_stderr": 0.0465614711001235,
"acc_norm": 0.6699029126213593,
"acc_norm_stderr": 0.0465614711001235
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8376068376068376,
"acc_stderr": 0.02416161812798774,
"acc_norm": 0.8376068376068376,
"acc_norm_stderr": 0.02416161812798774
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7573435504469987,
"acc_stderr": 0.01532988894089986,
"acc_norm": 0.7573435504469987,
"acc_norm_stderr": 0.01532988894089986
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.02599247202930639,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.02599247202930639
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.41899441340782123,
"acc_stderr": 0.016501579306861677,
"acc_norm": 0.41899441340782123,
"acc_norm_stderr": 0.016501579306861677
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.630718954248366,
"acc_stderr": 0.02763417668960266,
"acc_norm": 0.630718954248366,
"acc_norm_stderr": 0.02763417668960266
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6366559485530546,
"acc_stderr": 0.027316847674192707,
"acc_norm": 0.6366559485530546,
"acc_norm_stderr": 0.027316847674192707
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6419753086419753,
"acc_stderr": 0.02667561192603709,
"acc_norm": 0.6419753086419753,
"acc_norm_stderr": 0.02667561192603709
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.39361702127659576,
"acc_stderr": 0.029144544781596136,
"acc_norm": 0.39361702127659576,
"acc_norm_stderr": 0.029144544781596136
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4165580182529335,
"acc_stderr": 0.012591153245057388,
"acc_norm": 0.4165580182529335,
"acc_norm_stderr": 0.012591153245057388
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5147058823529411,
"acc_stderr": 0.03035969707904611,
"acc_norm": 0.5147058823529411,
"acc_norm_stderr": 0.03035969707904611
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5784313725490197,
"acc_stderr": 0.019977422600227474,
"acc_norm": 0.5784313725490197,
"acc_norm_stderr": 0.019977422600227474
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6454545454545455,
"acc_stderr": 0.045820048415054174,
"acc_norm": 0.6454545454545455,
"acc_norm_stderr": 0.045820048415054174
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6122448979591837,
"acc_stderr": 0.031192230726795656,
"acc_norm": 0.6122448979591837,
"acc_norm_stderr": 0.031192230726795656
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7263681592039801,
"acc_stderr": 0.03152439186555402,
"acc_norm": 0.7263681592039801,
"acc_norm_stderr": 0.03152439186555402
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.8,
"acc_stderr": 0.04020151261036846,
"acc_norm": 0.8,
"acc_norm_stderr": 0.04020151261036846
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333045,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333045
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7719298245614035,
"acc_stderr": 0.032180937956023566,
"acc_norm": 0.7719298245614035,
"acc_norm_stderr": 0.032180937956023566
},
"harness|truthfulqa:mc|0": {
"mc1": 0.35006119951040393,
"mc1_stderr": 0.01669794942015103,
"mc2": 0.5051246541228124,
"mc2_stderr": 0.015683474268697605
},
"harness|winogrande|5": {
"acc": 0.7616416732438832,
"acc_stderr": 0.011974948667702311
},
"harness|drop|3": {
"em": 0.10371224832214765,
"em_stderr": 0.003122327158910168,
"f1": 0.1647325922818787,
"f1_stderr": 0.003269141000174996
},
"harness|gsm8k|5": {
"acc": 0.1508718726307809,
"acc_stderr": 0.009859004137305687
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7484965920448303,
-0.8508415818214417,
0.25665614008903503,
0.2583930492401123,
-0.20477046072483063,
-0.06859070062637329,
0.013710519298911095,
-0.23248547315597534,
0.5488646030426025,
-0.012962913140654564,
-0.47876712679862976,
-0.7135379314422607,
-0.46909385919570923,
0.232343345... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
MoonIcee/joaoo | MoonIcee | 2023-11-18T18:04:55Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T18:04:55Z | 2023-11-18T18:04:43.000Z | 2023-11-18T18:04:43 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_KnutJaegersberg__MistralInstructLongish_public | open-llm-leaderboard | 2023-11-18T18:10:22Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T18:10:22Z | 2023-11-18T18:09:34.000Z | 2023-11-18T18:09:34 | ---
pretty_name: Evaluation run of KnutJaegersberg/MistralInstructLongish
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [KnutJaegersberg/MistralInstructLongish](https://huggingface.co/KnutJaegersberg/MistralInstructLongish)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__MistralInstructLongish_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T18:06:36.075482](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__MistralInstructLongish_public/blob/main/results_2023-11-18T18-06-36.075482.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5973103007065012,\n\
\ \"acc_stderr\": 0.03292863737262349,\n \"acc_norm\": 0.6085223268477976,\n\
\ \"acc_norm_stderr\": 0.033764939289429516,\n \"mc1\": 0.2729498164014688,\n\
\ \"mc1_stderr\": 0.015594753632006526,\n \"mc2\": 0.4055061003617047,\n\
\ \"mc2_stderr\": 0.014261205384601018,\n \"em\": 0.06910654362416108,\n\
\ \"em_stderr\": 0.0025974621402952,\n \"f1\": 0.21221371644295373,\n\
\ \"f1_stderr\": 0.00318177597759032\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5537542662116041,\n \"acc_stderr\": 0.014526705548539978,\n\
\ \"acc_norm\": 0.6075085324232082,\n \"acc_norm_stderr\": 0.014269634635670717\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6246763592909779,\n\
\ \"acc_stderr\": 0.004832167854501645,\n \"acc_norm\": 0.8185620394343757,\n\
\ \"acc_norm_stderr\": 0.0038459301696437916\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \
\ \"acc_stderr\": 0.042320736951515885,\n \"acc_norm\": 0.6,\n \"\
acc_norm_stderr\": 0.042320736951515885\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \
\ \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n\
\ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \
\ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.02854479331905533,\n\
\ \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.02854479331905533\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n\
\ \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.6527777777777778,\n\
\ \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\
: 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n\
\ \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.6011560693641619,\n\
\ \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266344,\n\
\ \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266344\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n\
\ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n\
\ \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\
\ \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n\
\ \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n\
\ \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3915343915343915,\n \"acc_stderr\": 0.02513809138885111,\n \"\
acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.02513809138885111\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n\
\ \"acc_stderr\": 0.04240799327574924,\n \"acc_norm\": 0.3412698412698413,\n\
\ \"acc_norm_stderr\": 0.04240799327574924\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6967741935483871,\n\
\ \"acc_stderr\": 0.02614868593067175,\n \"acc_norm\": 0.6967741935483871,\n\
\ \"acc_norm_stderr\": 0.02614868593067175\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n\
\ \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\"\
: 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.0356796977226805,\n\
\ \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.0356796977226805\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7424242424242424,\n \"acc_stderr\": 0.031156269519646836,\n \"\
acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.031156269519646836\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8393782383419689,\n \"acc_stderr\": 0.026499057701397436,\n\
\ \"acc_norm\": 0.8393782383419689,\n \"acc_norm_stderr\": 0.026499057701397436\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.024756000382130956,\n\
\ \"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.024756000382130956\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131137,\n \
\ \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131137\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.031968769891957786,\n\
\ \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.031968769891957786\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"\
acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7853211009174312,\n \"acc_stderr\": 0.017604304149256483,\n \"\
acc_norm\": 0.7853211009174312,\n \"acc_norm_stderr\": 0.017604304149256483\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"\
acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n\
\ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.027652153144159253,\n\
\ \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.027652153144159253\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6502242152466368,\n\
\ \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.6502242152466368,\n\
\ \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n\
\ \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7355371900826446,\n \"acc_stderr\": 0.040261875275912046,\n \"\
acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.040261875275912046\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n\
\ \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n\
\ \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n\
\ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n\
\ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n\
\ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.04058042015646034,\n\
\ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.04058042015646034\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n\
\ \"acc_stderr\": 0.02416161812798774,\n \"acc_norm\": 0.8376068376068376,\n\
\ \"acc_norm_stderr\": 0.02416161812798774\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7943805874840357,\n\
\ \"acc_stderr\": 0.01445250045678583,\n \"acc_norm\": 0.7943805874840357,\n\
\ \"acc_norm_stderr\": 0.01445250045678583\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.630057803468208,\n \"acc_stderr\": 0.025992472029306386,\n\
\ \"acc_norm\": 0.630057803468208,\n \"acc_norm_stderr\": 0.025992472029306386\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2581005586592179,\n\
\ \"acc_stderr\": 0.014635185616527822,\n \"acc_norm\": 0.2581005586592179,\n\
\ \"acc_norm_stderr\": 0.014635185616527822\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.02633661346904663,\n\
\ \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.02633661346904663\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n\
\ \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n\
\ \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.026229649178821163,\n\
\ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.026229649178821163\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370593,\n \
\ \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370593\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.408735332464146,\n\
\ \"acc_stderr\": 0.01255570134670338,\n \"acc_norm\": 0.408735332464146,\n\
\ \"acc_norm_stderr\": 0.01255570134670338\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5919117647058824,\n \"acc_stderr\": 0.029855261393483924,\n\
\ \"acc_norm\": 0.5919117647058824,\n \"acc_norm_stderr\": 0.029855261393483924\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6258169934640523,\n \"acc_stderr\": 0.019576953122088826,\n \
\ \"acc_norm\": 0.6258169934640523,\n \"acc_norm_stderr\": 0.019576953122088826\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6326530612244898,\n \"acc_stderr\": 0.03086214492108757,\n\
\ \"acc_norm\": 0.6326530612244898,\n \"acc_norm_stderr\": 0.03086214492108757\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8208955223880597,\n\
\ \"acc_stderr\": 0.027113286753111837,\n \"acc_norm\": 0.8208955223880597,\n\
\ \"acc_norm_stderr\": 0.027113286753111837\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \
\ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \
\ \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n\
\ \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2729498164014688,\n\
\ \"mc1_stderr\": 0.015594753632006526,\n \"mc2\": 0.4055061003617047,\n\
\ \"mc2_stderr\": 0.014261205384601018\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7655880031570639,\n \"acc_stderr\": 0.011906130106237986\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.06910654362416108,\n \
\ \"em_stderr\": 0.0025974621402952,\n \"f1\": 0.21221371644295373,\n \
\ \"f1_stderr\": 0.00318177597759032\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.015163002274450341,\n \"acc_stderr\": 0.00336602294972636\n\
\ }\n}\n```"
repo_url: https://huggingface.co/KnutJaegersberg/MistralInstructLongish
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|arc:challenge|25_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|drop|3_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|gsm8k|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hellaswag|10_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-06-36.075482.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T18-06-36.075482.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- '**/details_harness|winogrande|5_2023-11-18T18-06-36.075482.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T18-06-36.075482.parquet'
- config_name: results
data_files:
- split: 2023_11_18T18_06_36.075482
path:
- results_2023-11-18T18-06-36.075482.parquet
- split: latest
path:
- results_2023-11-18T18-06-36.075482.parquet
---
# Dataset Card for Evaluation run of KnutJaegersberg/MistralInstructLongish
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/KnutJaegersberg/MistralInstructLongish
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [KnutJaegersberg/MistralInstructLongish](https://huggingface.co/KnutJaegersberg/MistralInstructLongish) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__MistralInstructLongish_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T18:06:36.075482](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__MistralInstructLongish_public/blob/main/results_2023-11-18T18-06-36.075482.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5973103007065012,
"acc_stderr": 0.03292863737262349,
"acc_norm": 0.6085223268477976,
"acc_norm_stderr": 0.033764939289429516,
"mc1": 0.2729498164014688,
"mc1_stderr": 0.015594753632006526,
"mc2": 0.4055061003617047,
"mc2_stderr": 0.014261205384601018,
"em": 0.06910654362416108,
"em_stderr": 0.0025974621402952,
"f1": 0.21221371644295373,
"f1_stderr": 0.00318177597759032
},
"harness|arc:challenge|25": {
"acc": 0.5537542662116041,
"acc_stderr": 0.014526705548539978,
"acc_norm": 0.6075085324232082,
"acc_norm_stderr": 0.014269634635670717
},
"harness|hellaswag|10": {
"acc": 0.6246763592909779,
"acc_stderr": 0.004832167854501645,
"acc_norm": 0.8185620394343757,
"acc_norm_stderr": 0.0038459301696437916
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6,
"acc_stderr": 0.042320736951515885,
"acc_norm": 0.6,
"acc_norm_stderr": 0.042320736951515885
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.625,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.625,
"acc_norm_stderr": 0.039397364351956274
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6867924528301886,
"acc_stderr": 0.02854479331905533,
"acc_norm": 0.6867924528301886,
"acc_norm_stderr": 0.02854479331905533
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6527777777777778,
"acc_stderr": 0.039812405437178615,
"acc_norm": 0.6527777777777778,
"acc_norm_stderr": 0.039812405437178615
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6011560693641619,
"acc_stderr": 0.037336266553835096,
"acc_norm": 0.6011560693641619,
"acc_norm_stderr": 0.037336266553835096
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.39215686274509803,
"acc_stderr": 0.04858083574266344,
"acc_norm": 0.39215686274509803,
"acc_norm_stderr": 0.04858083574266344
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5617021276595745,
"acc_stderr": 0.03243618636108101,
"acc_norm": 0.5617021276595745,
"acc_norm_stderr": 0.03243618636108101
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.04692008381368909,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.04692008381368909
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5517241379310345,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.5517241379310345,
"acc_norm_stderr": 0.04144311810878152
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3915343915343915,
"acc_stderr": 0.02513809138885111,
"acc_norm": 0.3915343915343915,
"acc_norm_stderr": 0.02513809138885111
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3412698412698413,
"acc_stderr": 0.04240799327574924,
"acc_norm": 0.3412698412698413,
"acc_norm_stderr": 0.04240799327574924
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6967741935483871,
"acc_stderr": 0.02614868593067175,
"acc_norm": 0.6967741935483871,
"acc_norm_stderr": 0.02614868593067175
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.45320197044334976,
"acc_stderr": 0.03502544650845872,
"acc_norm": 0.45320197044334976,
"acc_norm_stderr": 0.03502544650845872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.68,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.68,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.703030303030303,
"acc_stderr": 0.0356796977226805,
"acc_norm": 0.703030303030303,
"acc_norm_stderr": 0.0356796977226805
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7424242424242424,
"acc_stderr": 0.031156269519646836,
"acc_norm": 0.7424242424242424,
"acc_norm_stderr": 0.031156269519646836
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8393782383419689,
"acc_stderr": 0.026499057701397436,
"acc_norm": 0.8393782383419689,
"acc_norm_stderr": 0.026499057701397436
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6076923076923076,
"acc_stderr": 0.024756000382130956,
"acc_norm": 0.6076923076923076,
"acc_norm_stderr": 0.024756000382130956
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.028897748741131137,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.028897748741131137
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5882352941176471,
"acc_stderr": 0.031968769891957786,
"acc_norm": 0.5882352941176471,
"acc_norm_stderr": 0.031968769891957786
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3509933774834437,
"acc_stderr": 0.03896981964257375,
"acc_norm": 0.3509933774834437,
"acc_norm_stderr": 0.03896981964257375
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7853211009174312,
"acc_stderr": 0.017604304149256483,
"acc_norm": 0.7853211009174312,
"acc_norm_stderr": 0.017604304149256483
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4537037037037037,
"acc_stderr": 0.03395322726375797,
"acc_norm": 0.4537037037037037,
"acc_norm_stderr": 0.03395322726375797
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.75,
"acc_stderr": 0.03039153369274154,
"acc_norm": 0.75,
"acc_norm_stderr": 0.03039153369274154
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7637130801687764,
"acc_stderr": 0.027652153144159253,
"acc_norm": 0.7637130801687764,
"acc_norm_stderr": 0.027652153144159253
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6502242152466368,
"acc_stderr": 0.03200736719484503,
"acc_norm": 0.6502242152466368,
"acc_norm_stderr": 0.03200736719484503
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7175572519083969,
"acc_stderr": 0.03948406125768361,
"acc_norm": 0.7175572519083969,
"acc_norm_stderr": 0.03948406125768361
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7355371900826446,
"acc_stderr": 0.040261875275912046,
"acc_norm": 0.7355371900826446,
"acc_norm_stderr": 0.040261875275912046
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.042844679680521934,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.042844679680521934
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7607361963190185,
"acc_stderr": 0.0335195387952127,
"acc_norm": 0.7607361963190185,
"acc_norm_stderr": 0.0335195387952127
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.48214285714285715,
"acc_stderr": 0.047427623612430116,
"acc_norm": 0.48214285714285715,
"acc_norm_stderr": 0.047427623612430116
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.04058042015646034,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.04058042015646034
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8376068376068376,
"acc_stderr": 0.02416161812798774,
"acc_norm": 0.8376068376068376,
"acc_norm_stderr": 0.02416161812798774
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7943805874840357,
"acc_stderr": 0.01445250045678583,
"acc_norm": 0.7943805874840357,
"acc_norm_stderr": 0.01445250045678583
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.025992472029306386,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.025992472029306386
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2581005586592179,
"acc_stderr": 0.014635185616527822,
"acc_norm": 0.2581005586592179,
"acc_norm_stderr": 0.014635185616527822
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.696078431372549,
"acc_stderr": 0.02633661346904663,
"acc_norm": 0.696078431372549,
"acc_norm_stderr": 0.02633661346904663
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6527331189710611,
"acc_stderr": 0.027040745502307336,
"acc_norm": 0.6527331189710611,
"acc_norm_stderr": 0.027040745502307336
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.026229649178821163,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.026229649178821163
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4219858156028369,
"acc_stderr": 0.029462189233370593,
"acc_norm": 0.4219858156028369,
"acc_norm_stderr": 0.029462189233370593
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.408735332464146,
"acc_stderr": 0.01255570134670338,
"acc_norm": 0.408735332464146,
"acc_norm_stderr": 0.01255570134670338
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5919117647058824,
"acc_stderr": 0.029855261393483924,
"acc_norm": 0.5919117647058824,
"acc_norm_stderr": 0.029855261393483924
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6258169934640523,
"acc_stderr": 0.019576953122088826,
"acc_norm": 0.6258169934640523,
"acc_norm_stderr": 0.019576953122088826
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6326530612244898,
"acc_stderr": 0.03086214492108757,
"acc_norm": 0.6326530612244898,
"acc_norm_stderr": 0.03086214492108757
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8208955223880597,
"acc_stderr": 0.027113286753111837,
"acc_norm": 0.8208955223880597,
"acc_norm_stderr": 0.027113286753111837
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.82,
"acc_stderr": 0.038612291966536934,
"acc_norm": 0.82,
"acc_norm_stderr": 0.038612291966536934
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5,
"acc_stderr": 0.03892494720807614,
"acc_norm": 0.5,
"acc_norm_stderr": 0.03892494720807614
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2729498164014688,
"mc1_stderr": 0.015594753632006526,
"mc2": 0.4055061003617047,
"mc2_stderr": 0.014261205384601018
},
"harness|winogrande|5": {
"acc": 0.7655880031570639,
"acc_stderr": 0.011906130106237986
},
"harness|drop|3": {
"em": 0.06910654362416108,
"em_stderr": 0.0025974621402952,
"f1": 0.21221371644295373,
"f1_stderr": 0.00318177597759032
},
"harness|gsm8k|5": {
"acc": 0.015163002274450341,
"acc_stderr": 0.00336602294972636
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7308540940284729,
-0.8465875387191772,
0.31182199716567993,
0.22513015568256378,
-0.180007666349411,
-0.07523708045482635,
-0.013031587935984135,
-0.23072296380996704,
0.5437802672386169,
-0.03249965235590935,
-0.48469260334968567,
-0.7060972452163696,
-0.44267454743385315,
0.2322700470... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Gabriel1322/jotace | Gabriel1322 | 2023-11-18T18:18:29Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T18:18:29Z | 2023-11-18T18:18:03.000Z | 2023-11-18T18:18:03 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
MichaelJH/Ryu-AI_dohyun-byDay_untokenized.datadict | MichaelJH | 2023-11-18T18:32:46Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T18:32:46Z | 2023-11-18T18:32:43.000Z | 2023-11-18T18:32:43 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 23534482
num_examples: 9545
download_size: 2752380
dataset_size: 23534482
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Buggy23/looney_tunes | Buggy23 | 2023-11-18T20:10:37Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T20:10:37Z | 2023-11-18T19:21:49.000Z | 2023-11-18T19:21:49 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_lgaalves__mistral-7b_open_platypus_public | open-llm-leaderboard | 2023-11-18T19:24:14Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:24:14Z | 2023-11-18T19:23:24.000Z | 2023-11-18T19:23:24 | ---
pretty_name: Evaluation run of lgaalves/mistral-7b_open_platypus
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [lgaalves/mistral-7b_open_platypus](https://huggingface.co/lgaalves/mistral-7b_open_platypus)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_lgaalves__mistral-7b_open_platypus_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T19:20:26.136874](https://huggingface.co/datasets/open-llm-leaderboard/details_lgaalves__mistral-7b_open_platypus_public/blob/main/results_2023-11-18T19-20-26.136874.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5921618091275235,\n\
\ \"acc_stderr\": 0.033165593817109554,\n \"acc_norm\": 0.6007436240197009,\n\
\ \"acc_norm_stderr\": 0.03392093055241413,\n \"mc1\": 0.3292533659730722,\n\
\ \"mc1_stderr\": 0.016451264440068232,\n \"mc2\": 0.48869138188349615,\n\
\ \"mc2_stderr\": 0.0147358552004315,\n \"em\": 0.0036703020134228187,\n\
\ \"em_stderr\": 0.0006192871806511272,\n \"f1\": 0.06589450503355675,\n\
\ \"f1_stderr\": 0.0014663770308574477\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5332764505119454,\n \"acc_stderr\": 0.014578995859605808,\n\
\ \"acc_norm\": 0.5580204778156996,\n \"acc_norm_stderr\": 0.014512682523128343\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6120294761999602,\n\
\ \"acc_stderr\": 0.004862919176408075,\n \"acc_norm\": 0.8212507468631747,\n\
\ \"acc_norm_stderr\": 0.003823591814133036\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n\
\ \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n\
\ \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.03910525752849724,\n\
\ \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.03910525752849724\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n\
\ \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\"\
: 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"\
acc\": 0.6566037735849056,\n \"acc_stderr\": 0.02922452646912479,\n \
\ \"acc_norm\": 0.6566037735849056,\n \"acc_norm_stderr\": 0.02922452646912479\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n\
\ \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n\
\ \"acc_norm_stderr\": 0.039994111357535424\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n\
\ \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709390974,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709390974\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n\
\ \"acc_stderr\": 0.03750757044895536,\n \"acc_norm\": 0.5895953757225434,\n\
\ \"acc_norm_stderr\": 0.03750757044895536\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3137254901960784,\n \"acc_stderr\": 0.04617034827006717,\n\
\ \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.04617034827006717\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n\
\ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.4978723404255319,\n \"acc_stderr\": 0.03268572658667492,\n\
\ \"acc_norm\": 0.4978723404255319,\n \"acc_norm_stderr\": 0.03268572658667492\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\
\ \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n\
\ \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.041657747757287644,\n\
\ \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.041657747757287644\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406776,\n \"\
acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406776\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n\
\ \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n\
\ \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6806451612903226,\n\
\ \"acc_stderr\": 0.026522709674667765,\n \"acc_norm\": 0.6806451612903226,\n\
\ \"acc_norm_stderr\": 0.026522709674667765\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.03510766597959217,\n\
\ \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.03510766597959217\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\"\
: 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7393939393939394,\n \"acc_stderr\": 0.034277431758165236,\n\
\ \"acc_norm\": 0.7393939393939394,\n \"acc_norm_stderr\": 0.034277431758165236\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"\
acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.025787723180723872,\n\
\ \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.025787723180723872\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.5512820512820513,\n \"acc_stderr\": 0.025217315184846486,\n\
\ \"acc_norm\": 0.5512820512820513,\n \"acc_norm_stderr\": 0.025217315184846486\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2814814814814815,\n \"acc_stderr\": 0.02742001935094528,\n \
\ \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.02742001935094528\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.5672268907563025,\n \"acc_stderr\": 0.032183581077426124,\n\
\ \"acc_norm\": 0.5672268907563025,\n \"acc_norm_stderr\": 0.032183581077426124\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"\
acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7724770642201835,\n \"acc_stderr\": 0.017974463578776502,\n \"\
acc_norm\": 0.7724770642201835,\n \"acc_norm_stderr\": 0.017974463578776502\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.39814814814814814,\n \"acc_stderr\": 0.033384734032074016,\n \"\
acc_norm\": 0.39814814814814814,\n \"acc_norm_stderr\": 0.033384734032074016\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7696078431372549,\n \"acc_stderr\": 0.02955429260569507,\n \"\
acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.02955429260569507\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676177,\n \
\ \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676177\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.726457399103139,\n\
\ \"acc_stderr\": 0.029918586707798834,\n \"acc_norm\": 0.726457399103139,\n\
\ \"acc_norm_stderr\": 0.029918586707798834\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n\
\ \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8429752066115702,\n \"acc_stderr\": 0.03321244842547128,\n \"\
acc_norm\": 0.8429752066115702,\n \"acc_norm_stderr\": 0.03321244842547128\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7037037037037037,\n\
\ \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.7037037037037037,\n\
\ \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n\
\ \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\
\ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\
\ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.04541609446503948,\n\
\ \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.04541609446503948\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8333333333333334,\n\
\ \"acc_stderr\": 0.024414947304543678,\n \"acc_norm\": 0.8333333333333334,\n\
\ \"acc_norm_stderr\": 0.024414947304543678\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7956577266922095,\n\
\ \"acc_stderr\": 0.014419123980931895,\n \"acc_norm\": 0.7956577266922095,\n\
\ \"acc_norm_stderr\": 0.014419123980931895\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6936416184971098,\n \"acc_stderr\": 0.024818350129436593,\n\
\ \"acc_norm\": 0.6936416184971098,\n \"acc_norm_stderr\": 0.024818350129436593\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.376536312849162,\n\
\ \"acc_stderr\": 0.016204672385106603,\n \"acc_norm\": 0.376536312849162,\n\
\ \"acc_norm_stderr\": 0.016204672385106603\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.027363593284684972,\n\
\ \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.027363593284684972\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n\
\ \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n\
\ \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.02517104191530968,\n\
\ \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.02517104191530968\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \
\ \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44589308996088656,\n\
\ \"acc_stderr\": 0.012695244711379774,\n \"acc_norm\": 0.44589308996088656,\n\
\ \"acc_norm_stderr\": 0.012695244711379774\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5919117647058824,\n \"acc_stderr\": 0.029855261393483924,\n\
\ \"acc_norm\": 0.5919117647058824,\n \"acc_norm_stderr\": 0.029855261393483924\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6209150326797386,\n \"acc_stderr\": 0.01962744474841223,\n \
\ \"acc_norm\": 0.6209150326797386,\n \"acc_norm_stderr\": 0.01962744474841223\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n\
\ \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n\
\ \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6244897959183674,\n \"acc_stderr\": 0.03100120903989484,\n\
\ \"acc_norm\": 0.6244897959183674,\n \"acc_norm_stderr\": 0.03100120903989484\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n\
\ \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.7512437810945274,\n\
\ \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n\
\ \"acc_stderr\": 0.0389136449583582,\n \"acc_norm\": 0.4879518072289157,\n\
\ \"acc_norm_stderr\": 0.0389136449583582\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.031885780176863984,\n\
\ \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.031885780176863984\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3292533659730722,\n\
\ \"mc1_stderr\": 0.016451264440068232,\n \"mc2\": 0.48869138188349615,\n\
\ \"mc2_stderr\": 0.0147358552004315\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7861089187056038,\n \"acc_stderr\": 0.011524466954090254\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.0036703020134228187,\n \
\ \"em_stderr\": 0.0006192871806511272,\n \"f1\": 0.06589450503355675,\n\
\ \"f1_stderr\": 0.0014663770308574477\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.12585291887793784,\n \"acc_stderr\": 0.009136212598406307\n\
\ }\n}\n```"
repo_url: https://huggingface.co/lgaalves/mistral-7b_open_platypus
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|drop|3_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-20-26.136874.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-20-26.136874.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- '**/details_harness|winogrande|5_2023-11-18T19-20-26.136874.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T19-20-26.136874.parquet'
- config_name: results
data_files:
- split: 2023_11_18T19_20_26.136874
path:
- results_2023-11-18T19-20-26.136874.parquet
- split: latest
path:
- results_2023-11-18T19-20-26.136874.parquet
---
# Dataset Card for Evaluation run of lgaalves/mistral-7b_open_platypus
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/lgaalves/mistral-7b_open_platypus
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [lgaalves/mistral-7b_open_platypus](https://huggingface.co/lgaalves/mistral-7b_open_platypus) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_lgaalves__mistral-7b_open_platypus_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T19:20:26.136874](https://huggingface.co/datasets/open-llm-leaderboard/details_lgaalves__mistral-7b_open_platypus_public/blob/main/results_2023-11-18T19-20-26.136874.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5921618091275235,
"acc_stderr": 0.033165593817109554,
"acc_norm": 0.6007436240197009,
"acc_norm_stderr": 0.03392093055241413,
"mc1": 0.3292533659730722,
"mc1_stderr": 0.016451264440068232,
"mc2": 0.48869138188349615,
"mc2_stderr": 0.0147358552004315,
"em": 0.0036703020134228187,
"em_stderr": 0.0006192871806511272,
"f1": 0.06589450503355675,
"f1_stderr": 0.0014663770308574477
},
"harness|arc:challenge|25": {
"acc": 0.5332764505119454,
"acc_stderr": 0.014578995859605808,
"acc_norm": 0.5580204778156996,
"acc_norm_stderr": 0.014512682523128343
},
"harness|hellaswag|10": {
"acc": 0.6120294761999602,
"acc_stderr": 0.004862919176408075,
"acc_norm": 0.8212507468631747,
"acc_norm_stderr": 0.003823591814133036
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5703703703703704,
"acc_stderr": 0.042763494943765995,
"acc_norm": 0.5703703703703704,
"acc_norm_stderr": 0.042763494943765995
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6381578947368421,
"acc_stderr": 0.03910525752849724,
"acc_norm": 0.6381578947368421,
"acc_norm_stderr": 0.03910525752849724
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6566037735849056,
"acc_stderr": 0.02922452646912479,
"acc_norm": 0.6566037735849056,
"acc_norm_stderr": 0.02922452646912479
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6458333333333334,
"acc_stderr": 0.039994111357535424,
"acc_norm": 0.6458333333333334,
"acc_norm_stderr": 0.039994111357535424
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709390974,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709390974
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5895953757225434,
"acc_stderr": 0.03750757044895536,
"acc_norm": 0.5895953757225434,
"acc_norm_stderr": 0.03750757044895536
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006717,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4978723404255319,
"acc_stderr": 0.03268572658667492,
"acc_norm": 0.4978723404255319,
"acc_norm_stderr": 0.03268572658667492
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4649122807017544,
"acc_stderr": 0.04692008381368909,
"acc_norm": 0.4649122807017544,
"acc_norm_stderr": 0.04692008381368909
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5103448275862069,
"acc_stderr": 0.041657747757287644,
"acc_norm": 0.5103448275862069,
"acc_norm_stderr": 0.041657747757287644
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42328042328042326,
"acc_stderr": 0.025446365634406776,
"acc_norm": 0.42328042328042326,
"acc_norm_stderr": 0.025446365634406776
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6806451612903226,
"acc_stderr": 0.026522709674667765,
"acc_norm": 0.6806451612903226,
"acc_norm_stderr": 0.026522709674667765
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.46798029556650245,
"acc_stderr": 0.03510766597959217,
"acc_norm": 0.46798029556650245,
"acc_norm_stderr": 0.03510766597959217
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7393939393939394,
"acc_stderr": 0.034277431758165236,
"acc_norm": 0.7393939393939394,
"acc_norm_stderr": 0.034277431758165236
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.03173071239071724,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.03173071239071724
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8497409326424871,
"acc_stderr": 0.025787723180723872,
"acc_norm": 0.8497409326424871,
"acc_norm_stderr": 0.025787723180723872
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5512820512820513,
"acc_stderr": 0.025217315184846486,
"acc_norm": 0.5512820512820513,
"acc_norm_stderr": 0.025217315184846486
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2814814814814815,
"acc_stderr": 0.02742001935094528,
"acc_norm": 0.2814814814814815,
"acc_norm_stderr": 0.02742001935094528
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5672268907563025,
"acc_stderr": 0.032183581077426124,
"acc_norm": 0.5672268907563025,
"acc_norm_stderr": 0.032183581077426124
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.03879687024073327,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.03879687024073327
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7724770642201835,
"acc_stderr": 0.017974463578776502,
"acc_norm": 0.7724770642201835,
"acc_norm_stderr": 0.017974463578776502
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.39814814814814814,
"acc_stderr": 0.033384734032074016,
"acc_norm": 0.39814814814814814,
"acc_norm_stderr": 0.033384734032074016
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7696078431372549,
"acc_stderr": 0.02955429260569507,
"acc_norm": 0.7696078431372549,
"acc_norm_stderr": 0.02955429260569507
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7848101265822784,
"acc_stderr": 0.026750826994676177,
"acc_norm": 0.7848101265822784,
"acc_norm_stderr": 0.026750826994676177
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.726457399103139,
"acc_stderr": 0.029918586707798834,
"acc_norm": 0.726457399103139,
"acc_norm_stderr": 0.029918586707798834
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6564885496183206,
"acc_stderr": 0.041649760719448786,
"acc_norm": 0.6564885496183206,
"acc_norm_stderr": 0.041649760719448786
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8429752066115702,
"acc_stderr": 0.03321244842547128,
"acc_norm": 0.8429752066115702,
"acc_norm_stderr": 0.03321244842547128
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7037037037037037,
"acc_stderr": 0.044143436668549335,
"acc_norm": 0.7037037037037037,
"acc_norm_stderr": 0.044143436668549335
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7668711656441718,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.7668711656441718,
"acc_norm_stderr": 0.0332201579577674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.6990291262135923,
"acc_stderr": 0.04541609446503948,
"acc_norm": 0.6990291262135923,
"acc_norm_stderr": 0.04541609446503948
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.024414947304543678,
"acc_norm": 0.8333333333333334,
"acc_norm_stderr": 0.024414947304543678
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7956577266922095,
"acc_stderr": 0.014419123980931895,
"acc_norm": 0.7956577266922095,
"acc_norm_stderr": 0.014419123980931895
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6936416184971098,
"acc_stderr": 0.024818350129436593,
"acc_norm": 0.6936416184971098,
"acc_norm_stderr": 0.024818350129436593
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.376536312849162,
"acc_stderr": 0.016204672385106603,
"acc_norm": 0.376536312849162,
"acc_norm_stderr": 0.016204672385106603
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.027363593284684972,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.027363593284684972
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7041800643086816,
"acc_stderr": 0.025922371788818763,
"acc_norm": 0.7041800643086816,
"acc_norm_stderr": 0.025922371788818763
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7129629629629629,
"acc_stderr": 0.02517104191530968,
"acc_norm": 0.7129629629629629,
"acc_norm_stderr": 0.02517104191530968
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4858156028368794,
"acc_stderr": 0.02981549448368206,
"acc_norm": 0.4858156028368794,
"acc_norm_stderr": 0.02981549448368206
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.44589308996088656,
"acc_stderr": 0.012695244711379774,
"acc_norm": 0.44589308996088656,
"acc_norm_stderr": 0.012695244711379774
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5919117647058824,
"acc_stderr": 0.029855261393483924,
"acc_norm": 0.5919117647058824,
"acc_norm_stderr": 0.029855261393483924
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6209150326797386,
"acc_stderr": 0.01962744474841223,
"acc_norm": 0.6209150326797386,
"acc_norm_stderr": 0.01962744474841223
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6272727272727273,
"acc_stderr": 0.04631381319425465,
"acc_norm": 0.6272727272727273,
"acc_norm_stderr": 0.04631381319425465
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6244897959183674,
"acc_stderr": 0.03100120903989484,
"acc_norm": 0.6244897959183674,
"acc_norm_stderr": 0.03100120903989484
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7512437810945274,
"acc_stderr": 0.030567675938916714,
"acc_norm": 0.7512437810945274,
"acc_norm_stderr": 0.030567675938916714
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4879518072289157,
"acc_stderr": 0.0389136449583582,
"acc_norm": 0.4879518072289157,
"acc_norm_stderr": 0.0389136449583582
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.031885780176863984,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.031885780176863984
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3292533659730722,
"mc1_stderr": 0.016451264440068232,
"mc2": 0.48869138188349615,
"mc2_stderr": 0.0147358552004315
},
"harness|winogrande|5": {
"acc": 0.7861089187056038,
"acc_stderr": 0.011524466954090254
},
"harness|drop|3": {
"em": 0.0036703020134228187,
"em_stderr": 0.0006192871806511272,
"f1": 0.06589450503355675,
"f1_stderr": 0.0014663770308574477
},
"harness|gsm8k|5": {
"acc": 0.12585291887793784,
"acc_stderr": 0.009136212598406307
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.6979018449783325,
-0.8295760154724121,
0.2768690288066864,
0.22179469466209412,
-0.1990787386894226,
-0.054284900426864624,
-0.005560975056141615,
-0.1998685598373413,
0.5625192523002625,
-0.031085945665836334,
-0.46863144636154175,
-0.7119648456573486,
-0.4491538405418396,
0.2467255294... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
epsilon3/vulnresearch | epsilon3 | 2023-11-18T19:25:00Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:25:00Z | 2023-11-18T19:25:00.000Z | 2023-11-18T19:25:00 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DanielaTocua/isi-project | DanielaTocua | 2023-11-18T19:46:21Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:46:21Z | 2023-11-18T19:35:38.000Z | 2023-11-18T19:35:38 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-7B_public | open-llm-leaderboard | 2023-11-18T19:40:39Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:40:39Z | 2023-11-18T19:39:52.000Z | 2023-11-18T19:39:52 | ---
pretty_name: Evaluation run of ajibawa-2023/Uncensored-Jordan-7B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ajibawa-2023/Uncensored-Jordan-7B](https://huggingface.co/ajibawa-2023/Uncensored-Jordan-7B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-7B_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T19:37:27.743703](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-7B_public/blob/main/results_2023-11-18T19-37-27.743703.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4574910452896481,\n\
\ \"acc_stderr\": 0.03440657715128802,\n \"acc_norm\": 0.4632598229794625,\n\
\ \"acc_norm_stderr\": 0.03522730896735207,\n \"mc1\": 0.32558139534883723,\n\
\ \"mc1_stderr\": 0.01640398946990783,\n \"mc2\": 0.47497547233950527,\n\
\ \"mc2_stderr\": 0.01568331719502122,\n \"em\": 0.2236786912751678,\n\
\ \"em_stderr\": 0.004267491957607617,\n \"f1\": 0.2846486996644306,\n\
\ \"f1_stderr\": 0.00427403120655588\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.49573378839590443,\n \"acc_stderr\": 0.014610858923956955,\n\
\ \"acc_norm\": 0.5127986348122867,\n \"acc_norm_stderr\": 0.014606603181012538\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5867357100179247,\n\
\ \"acc_stderr\": 0.0049141308554317776,\n \"acc_norm\": 0.7736506671977693,\n\
\ \"acc_norm_stderr\": 0.004176125850955359\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n\
\ \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.45185185185185184,\n\
\ \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.04017901275981749,\n\
\ \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.04017901275981749\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.49,\n\
\ \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \
\ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.5056603773584906,\n \"acc_stderr\": 0.030770900763851302,\n\
\ \"acc_norm\": 0.5056603773584906,\n \"acc_norm_stderr\": 0.030770900763851302\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4791666666666667,\n\
\ \"acc_stderr\": 0.041775789507399935,\n \"acc_norm\": 0.4791666666666667,\n\
\ \"acc_norm_stderr\": 0.041775789507399935\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"\
acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n\
\ \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.3988439306358382,\n\
\ \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n\
\ \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n\
\ \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.3829787234042553,\n \"acc_stderr\": 0.03177821250236922,\n\
\ \"acc_norm\": 0.3829787234042553,\n \"acc_norm_stderr\": 0.03177821250236922\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n\
\ \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n\
\ \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.4413793103448276,\n \"acc_stderr\": 0.04137931034482757,\n\
\ \"acc_norm\": 0.4413793103448276,\n \"acc_norm_stderr\": 0.04137931034482757\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2830687830687831,\n \"acc_stderr\": 0.023201392938194974,\n \"\
acc_norm\": 0.2830687830687831,\n \"acc_norm_stderr\": 0.023201392938194974\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n\
\ \"acc_stderr\": 0.03970158273235173,\n \"acc_norm\": 0.2698412698412698,\n\
\ \"acc_norm_stderr\": 0.03970158273235173\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5032258064516129,\n\
\ \"acc_stderr\": 0.028443414226438323,\n \"acc_norm\": 0.5032258064516129,\n\
\ \"acc_norm_stderr\": 0.028443414226438323\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n\
\ \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\
: 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.5757575757575758,\n \"acc_stderr\": 0.03859268142070265,\n\
\ \"acc_norm\": 0.5757575757575758,\n \"acc_norm_stderr\": 0.03859268142070265\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.5656565656565656,\n \"acc_stderr\": 0.03531505879359183,\n \"\
acc_norm\": 0.5656565656565656,\n \"acc_norm_stderr\": 0.03531505879359183\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.6528497409326425,\n \"acc_stderr\": 0.03435696168361355,\n\
\ \"acc_norm\": 0.6528497409326425,\n \"acc_norm_stderr\": 0.03435696168361355\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.4358974358974359,\n \"acc_stderr\": 0.02514180151117749,\n \
\ \"acc_norm\": 0.4358974358974359,\n \"acc_norm_stderr\": 0.02514180151117749\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.23703703703703705,\n \"acc_stderr\": 0.025928876132766114,\n \
\ \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.025928876132766114\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.39915966386554624,\n \"acc_stderr\": 0.03181110032413925,\n\
\ \"acc_norm\": 0.39915966386554624,\n \"acc_norm_stderr\": 0.03181110032413925\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"\
acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.5486238532110091,\n \"acc_stderr\": 0.02133571471126879,\n \"\
acc_norm\": 0.5486238532110091,\n \"acc_norm_stderr\": 0.02133571471126879\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4444444444444444,\n \"acc_stderr\": 0.03388857118502326,\n \"\
acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.03388857118502326\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.6029411764705882,\n \"acc_stderr\": 0.03434131164719129,\n \"\
acc_norm\": 0.6029411764705882,\n \"acc_norm_stderr\": 0.03434131164719129\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.5822784810126582,\n \"acc_stderr\": 0.032103530322412685,\n \
\ \"acc_norm\": 0.5822784810126582,\n \"acc_norm_stderr\": 0.032103530322412685\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5381165919282511,\n\
\ \"acc_stderr\": 0.033460150119732274,\n \"acc_norm\": 0.5381165919282511,\n\
\ \"acc_norm_stderr\": 0.033460150119732274\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.4961832061068702,\n \"acc_stderr\": 0.043851623256015534,\n\
\ \"acc_norm\": 0.4961832061068702,\n \"acc_norm_stderr\": 0.043851623256015534\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.5950413223140496,\n \"acc_stderr\": 0.04481137755942469,\n \"\
acc_norm\": 0.5950413223140496,\n \"acc_norm_stderr\": 0.04481137755942469\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5648148148148148,\n\
\ \"acc_stderr\": 0.04792898170907061,\n \"acc_norm\": 0.5648148148148148,\n\
\ \"acc_norm_stderr\": 0.04792898170907061\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.5153374233128835,\n \"acc_stderr\": 0.039265223787088445,\n\
\ \"acc_norm\": 0.5153374233128835,\n \"acc_norm_stderr\": 0.039265223787088445\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\
\ \"acc_stderr\": 0.04203277291467764,\n \"acc_norm\": 0.26785714285714285,\n\
\ \"acc_norm_stderr\": 0.04203277291467764\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.5631067961165048,\n \"acc_stderr\": 0.04911147107365777,\n\
\ \"acc_norm\": 0.5631067961165048,\n \"acc_norm_stderr\": 0.04911147107365777\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.688034188034188,\n\
\ \"acc_stderr\": 0.03035152732334493,\n \"acc_norm\": 0.688034188034188,\n\
\ \"acc_norm_stderr\": 0.03035152732334493\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \
\ \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6462324393358876,\n\
\ \"acc_stderr\": 0.017098184708161906,\n \"acc_norm\": 0.6462324393358876,\n\
\ \"acc_norm_stderr\": 0.017098184708161906\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.5173410404624278,\n \"acc_stderr\": 0.026902900458666647,\n\
\ \"acc_norm\": 0.5173410404624278,\n \"acc_norm_stderr\": 0.026902900458666647\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n\
\ \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n\
\ \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.4673202614379085,\n \"acc_stderr\": 0.02856869975222587,\n\
\ \"acc_norm\": 0.4673202614379085,\n \"acc_norm_stderr\": 0.02856869975222587\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.49517684887459806,\n\
\ \"acc_stderr\": 0.028396770444111298,\n \"acc_norm\": 0.49517684887459806,\n\
\ \"acc_norm_stderr\": 0.028396770444111298\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.5123456790123457,\n \"acc_stderr\": 0.027812262269327228,\n\
\ \"acc_norm\": 0.5123456790123457,\n \"acc_norm_stderr\": 0.027812262269327228\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.34397163120567376,\n \"acc_stderr\": 0.02833801742861132,\n \
\ \"acc_norm\": 0.34397163120567376,\n \"acc_norm_stderr\": 0.02833801742861132\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34615384615384615,\n\
\ \"acc_stderr\": 0.012150699768228579,\n \"acc_norm\": 0.34615384615384615,\n\
\ \"acc_norm_stderr\": 0.012150699768228579\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.4632352941176471,\n \"acc_stderr\": 0.030290619180485697,\n\
\ \"acc_norm\": 0.4632352941176471,\n \"acc_norm_stderr\": 0.030290619180485697\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.41830065359477125,\n \"acc_stderr\": 0.019955975145835542,\n \
\ \"acc_norm\": 0.41830065359477125,\n \"acc_norm_stderr\": 0.019955975145835542\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4909090909090909,\n\
\ \"acc_stderr\": 0.04788339768702861,\n \"acc_norm\": 0.4909090909090909,\n\
\ \"acc_norm_stderr\": 0.04788339768702861\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.4857142857142857,\n \"acc_stderr\": 0.03199615232806286,\n\
\ \"acc_norm\": 0.4857142857142857,\n \"acc_norm_stderr\": 0.03199615232806286\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6218905472636815,\n\
\ \"acc_stderr\": 0.034288678487786564,\n \"acc_norm\": 0.6218905472636815,\n\
\ \"acc_norm_stderr\": 0.034288678487786564\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \
\ \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.35542168674698793,\n\
\ \"acc_stderr\": 0.03726214354322416,\n \"acc_norm\": 0.35542168674698793,\n\
\ \"acc_norm_stderr\": 0.03726214354322416\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.6374269005847953,\n \"acc_stderr\": 0.0368713061556206,\n\
\ \"acc_norm\": 0.6374269005847953,\n \"acc_norm_stderr\": 0.0368713061556206\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.32558139534883723,\n\
\ \"mc1_stderr\": 0.01640398946990783,\n \"mc2\": 0.47497547233950527,\n\
\ \"mc2_stderr\": 0.01568331719502122\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7111286503551697,\n \"acc_stderr\": 0.012738241271018434\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.2236786912751678,\n \
\ \"em_stderr\": 0.004267491957607617,\n \"f1\": 0.2846486996644306,\n \
\ \"f1_stderr\": 0.00427403120655588\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.06747536012130402,\n \"acc_stderr\": 0.006909475136357452\n\
\ }\n}\n```"
repo_url: https://huggingface.co/ajibawa-2023/Uncensored-Jordan-7B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|drop|3_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-37-27.743703.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-37-27.743703.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- '**/details_harness|winogrande|5_2023-11-18T19-37-27.743703.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T19-37-27.743703.parquet'
- config_name: results
data_files:
- split: 2023_11_18T19_37_27.743703
path:
- results_2023-11-18T19-37-27.743703.parquet
- split: latest
path:
- results_2023-11-18T19-37-27.743703.parquet
---
# Dataset Card for Evaluation run of ajibawa-2023/Uncensored-Jordan-7B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/ajibawa-2023/Uncensored-Jordan-7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [ajibawa-2023/Uncensored-Jordan-7B](https://huggingface.co/ajibawa-2023/Uncensored-Jordan-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-7B_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T19:37:27.743703](https://huggingface.co/datasets/open-llm-leaderboard/details_ajibawa-2023__Uncensored-Jordan-7B_public/blob/main/results_2023-11-18T19-37-27.743703.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.4574910452896481,
"acc_stderr": 0.03440657715128802,
"acc_norm": 0.4632598229794625,
"acc_norm_stderr": 0.03522730896735207,
"mc1": 0.32558139534883723,
"mc1_stderr": 0.01640398946990783,
"mc2": 0.47497547233950527,
"mc2_stderr": 0.01568331719502122,
"em": 0.2236786912751678,
"em_stderr": 0.004267491957607617,
"f1": 0.2846486996644306,
"f1_stderr": 0.00427403120655588
},
"harness|arc:challenge|25": {
"acc": 0.49573378839590443,
"acc_stderr": 0.014610858923956955,
"acc_norm": 0.5127986348122867,
"acc_norm_stderr": 0.014606603181012538
},
"harness|hellaswag|10": {
"acc": 0.5867357100179247,
"acc_stderr": 0.0049141308554317776,
"acc_norm": 0.7736506671977693,
"acc_norm_stderr": 0.004176125850955359
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.45185185185185184,
"acc_stderr": 0.04299268905480864,
"acc_norm": 0.45185185185185184,
"acc_norm_stderr": 0.04299268905480864
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.42105263157894735,
"acc_stderr": 0.04017901275981749,
"acc_norm": 0.42105263157894735,
"acc_norm_stderr": 0.04017901275981749
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.5056603773584906,
"acc_stderr": 0.030770900763851302,
"acc_norm": 0.5056603773584906,
"acc_norm_stderr": 0.030770900763851302
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.4791666666666667,
"acc_stderr": 0.041775789507399935,
"acc_norm": 0.4791666666666667,
"acc_norm_stderr": 0.041775789507399935
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.42,
"acc_stderr": 0.04960449637488584,
"acc_norm": 0.42,
"acc_norm_stderr": 0.04960449637488584
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.3988439306358382,
"acc_stderr": 0.037336266553835096,
"acc_norm": 0.3988439306358382,
"acc_norm_stderr": 0.037336266553835096
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.27450980392156865,
"acc_stderr": 0.044405219061793275,
"acc_norm": 0.27450980392156865,
"acc_norm_stderr": 0.044405219061793275
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.3829787234042553,
"acc_stderr": 0.03177821250236922,
"acc_norm": 0.3829787234042553,
"acc_norm_stderr": 0.03177821250236922
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.22807017543859648,
"acc_stderr": 0.03947152782669415,
"acc_norm": 0.22807017543859648,
"acc_norm_stderr": 0.03947152782669415
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482757,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482757
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2830687830687831,
"acc_stderr": 0.023201392938194974,
"acc_norm": 0.2830687830687831,
"acc_norm_stderr": 0.023201392938194974
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235173,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235173
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.5032258064516129,
"acc_stderr": 0.028443414226438323,
"acc_norm": 0.5032258064516129,
"acc_norm_stderr": 0.028443414226438323
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.32019704433497537,
"acc_stderr": 0.032826493853041504,
"acc_norm": 0.32019704433497537,
"acc_norm_stderr": 0.032826493853041504
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.5757575757575758,
"acc_stderr": 0.03859268142070265,
"acc_norm": 0.5757575757575758,
"acc_norm_stderr": 0.03859268142070265
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.5656565656565656,
"acc_stderr": 0.03531505879359183,
"acc_norm": 0.5656565656565656,
"acc_norm_stderr": 0.03531505879359183
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.6528497409326425,
"acc_stderr": 0.03435696168361355,
"acc_norm": 0.6528497409326425,
"acc_norm_stderr": 0.03435696168361355
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.4358974358974359,
"acc_stderr": 0.02514180151117749,
"acc_norm": 0.4358974358974359,
"acc_norm_stderr": 0.02514180151117749
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.025928876132766114,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.025928876132766114
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.39915966386554624,
"acc_stderr": 0.03181110032413925,
"acc_norm": 0.39915966386554624,
"acc_norm_stderr": 0.03181110032413925
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31788079470198677,
"acc_stderr": 0.03802039760107903,
"acc_norm": 0.31788079470198677,
"acc_norm_stderr": 0.03802039760107903
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.5486238532110091,
"acc_stderr": 0.02133571471126879,
"acc_norm": 0.5486238532110091,
"acc_norm_stderr": 0.02133571471126879
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.03388857118502326,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.03388857118502326
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.6029411764705882,
"acc_stderr": 0.03434131164719129,
"acc_norm": 0.6029411764705882,
"acc_norm_stderr": 0.03434131164719129
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.5822784810126582,
"acc_stderr": 0.032103530322412685,
"acc_norm": 0.5822784810126582,
"acc_norm_stderr": 0.032103530322412685
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.5381165919282511,
"acc_stderr": 0.033460150119732274,
"acc_norm": 0.5381165919282511,
"acc_norm_stderr": 0.033460150119732274
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.4961832061068702,
"acc_stderr": 0.043851623256015534,
"acc_norm": 0.4961832061068702,
"acc_norm_stderr": 0.043851623256015534
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.5950413223140496,
"acc_stderr": 0.04481137755942469,
"acc_norm": 0.5950413223140496,
"acc_norm_stderr": 0.04481137755942469
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.5648148148148148,
"acc_stderr": 0.04792898170907061,
"acc_norm": 0.5648148148148148,
"acc_norm_stderr": 0.04792898170907061
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5153374233128835,
"acc_stderr": 0.039265223787088445,
"acc_norm": 0.5153374233128835,
"acc_norm_stderr": 0.039265223787088445
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.26785714285714285,
"acc_stderr": 0.04203277291467764,
"acc_norm": 0.26785714285714285,
"acc_norm_stderr": 0.04203277291467764
},
"harness|hendrycksTest-management|5": {
"acc": 0.5631067961165048,
"acc_stderr": 0.04911147107365777,
"acc_norm": 0.5631067961165048,
"acc_norm_stderr": 0.04911147107365777
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.688034188034188,
"acc_stderr": 0.03035152732334493,
"acc_norm": 0.688034188034188,
"acc_norm_stderr": 0.03035152732334493
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.6462324393358876,
"acc_stderr": 0.017098184708161906,
"acc_norm": 0.6462324393358876,
"acc_norm_stderr": 0.017098184708161906
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5173410404624278,
"acc_stderr": 0.026902900458666647,
"acc_norm": 0.5173410404624278,
"acc_norm_stderr": 0.026902900458666647
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2435754189944134,
"acc_stderr": 0.014355911964767864,
"acc_norm": 0.2435754189944134,
"acc_norm_stderr": 0.014355911964767864
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.4673202614379085,
"acc_stderr": 0.02856869975222587,
"acc_norm": 0.4673202614379085,
"acc_norm_stderr": 0.02856869975222587
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.49517684887459806,
"acc_stderr": 0.028396770444111298,
"acc_norm": 0.49517684887459806,
"acc_norm_stderr": 0.028396770444111298
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.5123456790123457,
"acc_stderr": 0.027812262269327228,
"acc_norm": 0.5123456790123457,
"acc_norm_stderr": 0.027812262269327228
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.34397163120567376,
"acc_stderr": 0.02833801742861132,
"acc_norm": 0.34397163120567376,
"acc_norm_stderr": 0.02833801742861132
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.34615384615384615,
"acc_stderr": 0.012150699768228579,
"acc_norm": 0.34615384615384615,
"acc_norm_stderr": 0.012150699768228579
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.4632352941176471,
"acc_stderr": 0.030290619180485697,
"acc_norm": 0.4632352941176471,
"acc_norm_stderr": 0.030290619180485697
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.41830065359477125,
"acc_stderr": 0.019955975145835542,
"acc_norm": 0.41830065359477125,
"acc_norm_stderr": 0.019955975145835542
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.4909090909090909,
"acc_stderr": 0.04788339768702861,
"acc_norm": 0.4909090909090909,
"acc_norm_stderr": 0.04788339768702861
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.4857142857142857,
"acc_stderr": 0.03199615232806286,
"acc_norm": 0.4857142857142857,
"acc_norm_stderr": 0.03199615232806286
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.6218905472636815,
"acc_stderr": 0.034288678487786564,
"acc_norm": 0.6218905472636815,
"acc_norm_stderr": 0.034288678487786564
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.68,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.68,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-virology|5": {
"acc": 0.35542168674698793,
"acc_stderr": 0.03726214354322416,
"acc_norm": 0.35542168674698793,
"acc_norm_stderr": 0.03726214354322416
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.6374269005847953,
"acc_stderr": 0.0368713061556206,
"acc_norm": 0.6374269005847953,
"acc_norm_stderr": 0.0368713061556206
},
"harness|truthfulqa:mc|0": {
"mc1": 0.32558139534883723,
"mc1_stderr": 0.01640398946990783,
"mc2": 0.47497547233950527,
"mc2_stderr": 0.01568331719502122
},
"harness|winogrande|5": {
"acc": 0.7111286503551697,
"acc_stderr": 0.012738241271018434
},
"harness|drop|3": {
"em": 0.2236786912751678,
"em_stderr": 0.004267491957607617,
"f1": 0.2846486996644306,
"f1_stderr": 0.00427403120655588
},
"harness|gsm8k|5": {
"acc": 0.06747536012130402,
"acc_stderr": 0.006909475136357452
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7507162690162659,
-0.845665454864502,
0.25216373801231384,
0.2441135197877884,
-0.2034958004951477,
-0.05673279985785484,
0.010057740844786167,
-0.22563153505325317,
0.5535466074943542,
-0.003257784293964505,
-0.47078800201416016,
-0.7181340456008911,
-0.47474026679992676,
0.24347992241... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DanielaTocua/isi-project-2023 | DanielaTocua | 2023-11-18T19:44:05Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:44:05Z | 2023-11-18T19:44:05.000Z | 2023-11-18T19:44:05 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-20b-instruct_public | open-llm-leaderboard | 2023-11-18T19:58:56Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T19:58:56Z | 2023-11-18T19:58:04.000Z | 2023-11-18T19:58:04 | ---
pretty_name: Evaluation run of AI-Sweden-Models/gpt-sw3-20b-instruct
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [AI-Sweden-Models/gpt-sw3-20b-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-20b-instruct)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-20b-instruct_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T19:55:37.406086](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-20b-instruct_public/blob/main/results_2023-11-18T19-55-37.406086.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.320676822366553,\n\
\ \"acc_stderr\": 0.03280575656070825,\n \"acc_norm\": 0.3219577309602372,\n\
\ \"acc_norm_stderr\": 0.03355424008379778,\n \"mc1\": 0.25458996328029376,\n\
\ \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.4101922343540469,\n\
\ \"mc2_stderr\": 0.014529149906569373,\n \"em\": 0.014471476510067114,\n\
\ \"em_stderr\": 0.0012230118709417176,\n \"f1\": 0.051461828859060935,\n\
\ \"f1_stderr\": 0.0016503207117057528\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.42150170648464164,\n \"acc_stderr\": 0.014430197069326012,\n\
\ \"acc_norm\": 0.431740614334471,\n \"acc_norm_stderr\": 0.014474591427196207\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5312686715793666,\n\
\ \"acc_stderr\": 0.004980014536539819,\n \"acc_norm\": 0.7109141605257917,\n\
\ \"acc_norm_stderr\": 0.004524113671259695\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3333333333333333,\n\
\ \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.3333333333333333,\n\
\ \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.2565789473684211,\n \"acc_stderr\": 0.0355418036802569,\n\
\ \"acc_norm\": 0.2565789473684211,\n \"acc_norm_stderr\": 0.0355418036802569\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.33,\n\
\ \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \
\ \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.3283018867924528,\n \"acc_stderr\": 0.02890159361241178,\n\
\ \"acc_norm\": 0.3283018867924528,\n \"acc_norm_stderr\": 0.02890159361241178\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\
\ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n\
\ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.23,\n \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\"\
: 0.23,\n \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2658959537572254,\n\
\ \"acc_stderr\": 0.033687629322594316,\n \"acc_norm\": 0.2658959537572254,\n\
\ \"acc_norm_stderr\": 0.033687629322594316\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03708284662416544,\n\
\ \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03708284662416544\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n\
\ \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.34893617021276596,\n \"acc_stderr\": 0.031158522131357783,\n\
\ \"acc_norm\": 0.34893617021276596,\n \"acc_norm_stderr\": 0.031158522131357783\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\
\ \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n\
\ \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.3310344827586207,\n \"acc_stderr\": 0.03921545312467122,\n\
\ \"acc_norm\": 0.3310344827586207,\n \"acc_norm_stderr\": 0.03921545312467122\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.25925925925925924,\n \"acc_stderr\": 0.02256989707491841,\n \"\
acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02256989707491841\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.20634920634920634,\n\
\ \"acc_stderr\": 0.036196045241242515,\n \"acc_norm\": 0.20634920634920634,\n\
\ \"acc_norm_stderr\": 0.036196045241242515\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n\
\ \"acc_stderr\": 0.026450874489042774,\n \"acc_norm\": 0.3161290322580645,\n\
\ \"acc_norm_stderr\": 0.026450874489042774\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.31527093596059114,\n \"acc_stderr\": 0.03269080871970187,\n\
\ \"acc_norm\": 0.31527093596059114,\n \"acc_norm_stderr\": 0.03269080871970187\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\"\
: 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.41818181818181815,\n \"acc_stderr\": 0.03851716319398394,\n\
\ \"acc_norm\": 0.41818181818181815,\n \"acc_norm_stderr\": 0.03851716319398394\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.29797979797979796,\n \"acc_stderr\": 0.03258630383836556,\n \"\
acc_norm\": 0.29797979797979796,\n \"acc_norm_stderr\": 0.03258630383836556\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.33678756476683935,\n \"acc_stderr\": 0.03410780251836184,\n\
\ \"acc_norm\": 0.33678756476683935,\n \"acc_norm_stderr\": 0.03410780251836184\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.26153846153846155,\n \"acc_stderr\": 0.02228214120420442,\n\
\ \"acc_norm\": 0.26153846153846155,\n \"acc_norm_stderr\": 0.02228214120420442\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02534809746809786,\n \
\ \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02534809746809786\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.27310924369747897,\n \"acc_stderr\": 0.028942004040998167,\n\
\ \"acc_norm\": 0.27310924369747897,\n \"acc_norm_stderr\": 0.028942004040998167\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"\
acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.3577981651376147,\n \"acc_stderr\": 0.020552060784827818,\n \"\
acc_norm\": 0.3577981651376147,\n \"acc_norm_stderr\": 0.020552060784827818\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.18981481481481483,\n \"acc_stderr\": 0.026744714834691936,\n \"\
acc_norm\": 0.18981481481481483,\n \"acc_norm_stderr\": 0.026744714834691936\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.35784313725490197,\n \"acc_stderr\": 0.03364487286088299,\n \"\
acc_norm\": 0.35784313725490197,\n \"acc_norm_stderr\": 0.03364487286088299\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.41350210970464135,\n \"acc_stderr\": 0.03205649904851858,\n \
\ \"acc_norm\": 0.41350210970464135,\n \"acc_norm_stderr\": 0.03205649904851858\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4484304932735426,\n\
\ \"acc_stderr\": 0.03337883736255098,\n \"acc_norm\": 0.4484304932735426,\n\
\ \"acc_norm_stderr\": 0.03337883736255098\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.3282442748091603,\n \"acc_stderr\": 0.04118438565806298,\n\
\ \"acc_norm\": 0.3282442748091603,\n \"acc_norm_stderr\": 0.04118438565806298\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.3884297520661157,\n \"acc_stderr\": 0.044492703500683815,\n \"\
acc_norm\": 0.3884297520661157,\n \"acc_norm_stderr\": 0.044492703500683815\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.37962962962962965,\n\
\ \"acc_stderr\": 0.04691521224077742,\n \"acc_norm\": 0.37962962962962965,\n\
\ \"acc_norm_stderr\": 0.04691521224077742\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.294478527607362,\n \"acc_stderr\": 0.03581165790474082,\n\
\ \"acc_norm\": 0.294478527607362,\n \"acc_norm_stderr\": 0.03581165790474082\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n\
\ \"acc_stderr\": 0.04327040932578729,\n \"acc_norm\": 0.29464285714285715,\n\
\ \"acc_norm_stderr\": 0.04327040932578729\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.34951456310679613,\n \"acc_stderr\": 0.047211885060971716,\n\
\ \"acc_norm\": 0.34951456310679613,\n \"acc_norm_stderr\": 0.047211885060971716\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.4444444444444444,\n\
\ \"acc_stderr\": 0.03255326307272487,\n \"acc_norm\": 0.4444444444444444,\n\
\ \"acc_norm_stderr\": 0.03255326307272487\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.37037037037037035,\n\
\ \"acc_stderr\": 0.017268607560005783,\n \"acc_norm\": 0.37037037037037035,\n\
\ \"acc_norm_stderr\": 0.017268607560005783\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.34104046242774566,\n \"acc_stderr\": 0.02552247463212161,\n\
\ \"acc_norm\": 0.34104046242774566,\n \"acc_norm_stderr\": 0.02552247463212161\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n\
\ \"acc_stderr\": 0.014355911964767867,\n \"acc_norm\": 0.2435754189944134,\n\
\ \"acc_norm_stderr\": 0.014355911964767867\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.02609016250427904,\n\
\ \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.02609016250427904\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n\
\ \"acc_stderr\": 0.025218040373410626,\n \"acc_norm\": 0.27009646302250806,\n\
\ \"acc_norm_stderr\": 0.025218040373410626\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.32098765432098764,\n \"acc_stderr\": 0.02597656601086273,\n\
\ \"acc_norm\": 0.32098765432098764,\n \"acc_norm_stderr\": 0.02597656601086273\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.2624113475177305,\n \"acc_stderr\": 0.02624492034984302,\n \
\ \"acc_norm\": 0.2624113475177305,\n \"acc_norm_stderr\": 0.02624492034984302\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.29335071707953064,\n\
\ \"acc_stderr\": 0.011628520449582075,\n \"acc_norm\": 0.29335071707953064,\n\
\ \"acc_norm_stderr\": 0.011628520449582075\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.29044117647058826,\n \"acc_stderr\": 0.027576468622740512,\n\
\ \"acc_norm\": 0.29044117647058826,\n \"acc_norm_stderr\": 0.027576468622740512\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.3022875816993464,\n \"acc_stderr\": 0.018579232711113877,\n \
\ \"acc_norm\": 0.3022875816993464,\n \"acc_norm_stderr\": 0.018579232711113877\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.32727272727272727,\n\
\ \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.32727272727272727,\n\
\ \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.2653061224489796,\n \"acc_stderr\": 0.028263889943784606,\n\
\ \"acc_norm\": 0.2653061224489796,\n \"acc_norm_stderr\": 0.028263889943784606\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.34328358208955223,\n\
\ \"acc_stderr\": 0.03357379665433431,\n \"acc_norm\": 0.34328358208955223,\n\
\ \"acc_norm_stderr\": 0.03357379665433431\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3373493975903614,\n\
\ \"acc_stderr\": 0.03680783690727581,\n \"acc_norm\": 0.3373493975903614,\n\
\ \"acc_norm_stderr\": 0.03680783690727581\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.36257309941520466,\n \"acc_stderr\": 0.036871306155620606,\n\
\ \"acc_norm\": 0.36257309941520466,\n \"acc_norm_stderr\": 0.036871306155620606\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25458996328029376,\n\
\ \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.4101922343540469,\n\
\ \"mc2_stderr\": 0.014529149906569373\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6677190213101816,\n \"acc_stderr\": 0.013238316554236525\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.014471476510067114,\n \
\ \"em_stderr\": 0.0012230118709417176,\n \"f1\": 0.051461828859060935,\n\
\ \"f1_stderr\": 0.0016503207117057528\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.08794541319181198,\n \"acc_stderr\": 0.007801162197487717\n\
\ }\n}\n```"
repo_url: https://huggingface.co/AI-Sweden-Models/gpt-sw3-20b-instruct
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|drop|3_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-55-37.406086.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T19-55-37.406086.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- '**/details_harness|winogrande|5_2023-11-18T19-55-37.406086.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T19-55-37.406086.parquet'
- config_name: results
data_files:
- split: 2023_11_18T19_55_37.406086
path:
- results_2023-11-18T19-55-37.406086.parquet
- split: latest
path:
- results_2023-11-18T19-55-37.406086.parquet
---
# Dataset Card for Evaluation run of AI-Sweden-Models/gpt-sw3-20b-instruct
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/AI-Sweden-Models/gpt-sw3-20b-instruct
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [AI-Sweden-Models/gpt-sw3-20b-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-20b-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-20b-instruct_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T19:55:37.406086](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-20b-instruct_public/blob/main/results_2023-11-18T19-55-37.406086.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.320676822366553,
"acc_stderr": 0.03280575656070825,
"acc_norm": 0.3219577309602372,
"acc_norm_stderr": 0.03355424008379778,
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156494,
"mc2": 0.4101922343540469,
"mc2_stderr": 0.014529149906569373,
"em": 0.014471476510067114,
"em_stderr": 0.0012230118709417176,
"f1": 0.051461828859060935,
"f1_stderr": 0.0016503207117057528
},
"harness|arc:challenge|25": {
"acc": 0.42150170648464164,
"acc_stderr": 0.014430197069326012,
"acc_norm": 0.431740614334471,
"acc_norm_stderr": 0.014474591427196207
},
"harness|hellaswag|10": {
"acc": 0.5312686715793666,
"acc_stderr": 0.004980014536539819,
"acc_norm": 0.7109141605257917,
"acc_norm_stderr": 0.004524113671259695
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04072314811876837,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04072314811876837
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.2565789473684211,
"acc_stderr": 0.0355418036802569,
"acc_norm": 0.2565789473684211,
"acc_norm_stderr": 0.0355418036802569
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.3283018867924528,
"acc_stderr": 0.02890159361241178,
"acc_norm": 0.3283018867924528,
"acc_norm_stderr": 0.02890159361241178
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2569444444444444,
"acc_stderr": 0.03653946969442099,
"acc_norm": 0.2569444444444444,
"acc_norm_stderr": 0.03653946969442099
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.23,
"acc_stderr": 0.042295258468165044,
"acc_norm": 0.23,
"acc_norm_stderr": 0.042295258468165044
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.033687629322594316,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.033687629322594316
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.16666666666666666,
"acc_stderr": 0.03708284662416544,
"acc_norm": 0.16666666666666666,
"acc_norm_stderr": 0.03708284662416544
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.34893617021276596,
"acc_stderr": 0.031158522131357783,
"acc_norm": 0.34893617021276596,
"acc_norm_stderr": 0.031158522131357783
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022056,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.3310344827586207,
"acc_stderr": 0.03921545312467122,
"acc_norm": 0.3310344827586207,
"acc_norm_stderr": 0.03921545312467122
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02256989707491841,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02256989707491841
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.20634920634920634,
"acc_stderr": 0.036196045241242515,
"acc_norm": 0.20634920634920634,
"acc_norm_stderr": 0.036196045241242515
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.3161290322580645,
"acc_stderr": 0.026450874489042774,
"acc_norm": 0.3161290322580645,
"acc_norm_stderr": 0.026450874489042774
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.31527093596059114,
"acc_stderr": 0.03269080871970187,
"acc_norm": 0.31527093596059114,
"acc_norm_stderr": 0.03269080871970187
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.41818181818181815,
"acc_stderr": 0.03851716319398394,
"acc_norm": 0.41818181818181815,
"acc_norm_stderr": 0.03851716319398394
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.29797979797979796,
"acc_stderr": 0.03258630383836556,
"acc_norm": 0.29797979797979796,
"acc_norm_stderr": 0.03258630383836556
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.33678756476683935,
"acc_stderr": 0.03410780251836184,
"acc_norm": 0.33678756476683935,
"acc_norm_stderr": 0.03410780251836184
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.26153846153846155,
"acc_stderr": 0.02228214120420442,
"acc_norm": 0.26153846153846155,
"acc_norm_stderr": 0.02228214120420442
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.02534809746809786,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.02534809746809786
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.27310924369747897,
"acc_stderr": 0.028942004040998167,
"acc_norm": 0.27310924369747897,
"acc_norm_stderr": 0.028942004040998167
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008936,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008936
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.3577981651376147,
"acc_stderr": 0.020552060784827818,
"acc_norm": 0.3577981651376147,
"acc_norm_stderr": 0.020552060784827818
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.18981481481481483,
"acc_stderr": 0.026744714834691936,
"acc_norm": 0.18981481481481483,
"acc_norm_stderr": 0.026744714834691936
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.35784313725490197,
"acc_stderr": 0.03364487286088299,
"acc_norm": 0.35784313725490197,
"acc_norm_stderr": 0.03364487286088299
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.41350210970464135,
"acc_stderr": 0.03205649904851858,
"acc_norm": 0.41350210970464135,
"acc_norm_stderr": 0.03205649904851858
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.4484304932735426,
"acc_stderr": 0.03337883736255098,
"acc_norm": 0.4484304932735426,
"acc_norm_stderr": 0.03337883736255098
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.3282442748091603,
"acc_stderr": 0.04118438565806298,
"acc_norm": 0.3282442748091603,
"acc_norm_stderr": 0.04118438565806298
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.3884297520661157,
"acc_stderr": 0.044492703500683815,
"acc_norm": 0.3884297520661157,
"acc_norm_stderr": 0.044492703500683815
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.37962962962962965,
"acc_stderr": 0.04691521224077742,
"acc_norm": 0.37962962962962965,
"acc_norm_stderr": 0.04691521224077742
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.294478527607362,
"acc_stderr": 0.03581165790474082,
"acc_norm": 0.294478527607362,
"acc_norm_stderr": 0.03581165790474082
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.29464285714285715,
"acc_stderr": 0.04327040932578729,
"acc_norm": 0.29464285714285715,
"acc_norm_stderr": 0.04327040932578729
},
"harness|hendrycksTest-management|5": {
"acc": 0.34951456310679613,
"acc_stderr": 0.047211885060971716,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.047211885060971716
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.03255326307272487,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.03255326307272487
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.017268607560005783,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.017268607560005783
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.34104046242774566,
"acc_stderr": 0.02552247463212161,
"acc_norm": 0.34104046242774566,
"acc_norm_stderr": 0.02552247463212161
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2435754189944134,
"acc_stderr": 0.014355911964767867,
"acc_norm": 0.2435754189944134,
"acc_norm_stderr": 0.014355911964767867
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.29411764705882354,
"acc_stderr": 0.02609016250427904,
"acc_norm": 0.29411764705882354,
"acc_norm_stderr": 0.02609016250427904
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.27009646302250806,
"acc_stderr": 0.025218040373410626,
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410626
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.32098765432098764,
"acc_stderr": 0.02597656601086273,
"acc_norm": 0.32098765432098764,
"acc_norm_stderr": 0.02597656601086273
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.02624492034984302,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.02624492034984302
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.29335071707953064,
"acc_stderr": 0.011628520449582075,
"acc_norm": 0.29335071707953064,
"acc_norm_stderr": 0.011628520449582075
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.29044117647058826,
"acc_stderr": 0.027576468622740512,
"acc_norm": 0.29044117647058826,
"acc_norm_stderr": 0.027576468622740512
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.3022875816993464,
"acc_stderr": 0.018579232711113877,
"acc_norm": 0.3022875816993464,
"acc_norm_stderr": 0.018579232711113877
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.2653061224489796,
"acc_stderr": 0.028263889943784606,
"acc_norm": 0.2653061224489796,
"acc_norm_stderr": 0.028263889943784606
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.34328358208955223,
"acc_stderr": 0.03357379665433431,
"acc_norm": 0.34328358208955223,
"acc_norm_stderr": 0.03357379665433431
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-virology|5": {
"acc": 0.3373493975903614,
"acc_stderr": 0.03680783690727581,
"acc_norm": 0.3373493975903614,
"acc_norm_stderr": 0.03680783690727581
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.36257309941520466,
"acc_stderr": 0.036871306155620606,
"acc_norm": 0.36257309941520466,
"acc_norm_stderr": 0.036871306155620606
},
"harness|truthfulqa:mc|0": {
"mc1": 0.25458996328029376,
"mc1_stderr": 0.015250117079156494,
"mc2": 0.4101922343540469,
"mc2_stderr": 0.014529149906569373
},
"harness|winogrande|5": {
"acc": 0.6677190213101816,
"acc_stderr": 0.013238316554236525
},
"harness|drop|3": {
"em": 0.014471476510067114,
"em_stderr": 0.0012230118709417176,
"f1": 0.051461828859060935,
"f1_stderr": 0.0016503207117057528
},
"harness|gsm8k|5": {
"acc": 0.08794541319181198,
"acc_stderr": 0.007801162197487717
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7191528081893921,
-0.8236277103424072,
0.31726527214050293,
0.20273080468177795,
-0.19072134792804718,
-0.07498454302549362,
0.021539704874157906,
-0.24334123730659485,
0.48997238278388977,
-0.05529274418950081,
-0.48525479435920715,
-0.7134705781936646,
-0.4393260180950165,
0.232658252... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_nnpy__Nape-0_public | open-llm-leaderboard | 2023-11-18T20:36:01Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T20:36:01Z | 2023-11-18T20:35:14.000Z | 2023-11-18T20:35:14 | ---
pretty_name: Evaluation run of nnpy/Nape-0
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [nnpy/Nape-0](https://huggingface.co/nnpy/Nape-0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nnpy__Nape-0_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T20:32:43.776654](https://huggingface.co/datasets/open-llm-leaderboard/details_nnpy__Nape-0_public/blob/main/results_2023-11-18T20-32-43.776654.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2543128094432495,\n\
\ \"acc_stderr\": 0.03062072612679358,\n \"acc_norm\": 0.2558387672983551,\n\
\ \"acc_norm_stderr\": 0.031412385111215566,\n \"mc1\": 0.23745410036719705,\n\
\ \"mc1_stderr\": 0.014896277441041836,\n \"mc2\": 0.3899188336745711,\n\
\ \"mc2_stderr\": 0.014581462986356814,\n \"em\": 0.0017827181208053692,\n\
\ \"em_stderr\": 0.0004320097346039005,\n \"f1\": 0.03894714765100676,\n\
\ \"f1_stderr\": 0.0011174860838397392\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.31143344709897613,\n \"acc_stderr\": 0.013532472099850939,\n\
\ \"acc_norm\": 0.3267918088737201,\n \"acc_norm_stderr\": 0.013706665975587331\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4470225054769966,\n\
\ \"acc_stderr\": 0.004961693567208816,\n \"acc_norm\": 0.5868352917745469,\n\
\ \"acc_norm_stderr\": 0.004913955705080124\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34074074074074073,\n\
\ \"acc_stderr\": 0.04094376269996793,\n \"acc_norm\": 0.34074074074074073,\n\
\ \"acc_norm_stderr\": 0.04094376269996793\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.034597776068105365,\n\
\ \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.034597776068105365\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.2,\n\
\ \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \
\ \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.027008766090708097,\n\
\ \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.027008766090708097\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2152777777777778,\n\
\ \"acc_stderr\": 0.03437079344106132,\n \"acc_norm\": 0.2152777777777778,\n\
\ \"acc_norm_stderr\": 0.03437079344106132\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n \
\ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\"\
: 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.23121387283236994,\n\
\ \"acc_stderr\": 0.03214737302029471,\n \"acc_norm\": 0.23121387283236994,\n\
\ \"acc_norm_stderr\": 0.03214737302029471\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364396,\n\
\ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364396\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n\
\ \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.251063829787234,\n \"acc_stderr\": 0.028346963777162452,\n\
\ \"acc_norm\": 0.251063829787234,\n \"acc_norm_stderr\": 0.028346963777162452\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\
\ \"acc_stderr\": 0.041857744240220575,\n \"acc_norm\": 0.2719298245614035,\n\
\ \"acc_norm_stderr\": 0.041857744240220575\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.2896551724137931,\n \"acc_stderr\": 0.03780019230438014,\n\
\ \"acc_norm\": 0.2896551724137931,\n \"acc_norm_stderr\": 0.03780019230438014\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2671957671957672,\n \"acc_stderr\": 0.02278967314577656,\n \"\
acc_norm\": 0.2671957671957672,\n \"acc_norm_stderr\": 0.02278967314577656\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.14285714285714285,\n\
\ \"acc_stderr\": 0.0312984318574381,\n \"acc_norm\": 0.14285714285714285,\n\
\ \"acc_norm_stderr\": 0.0312984318574381\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24838709677419354,\n\
\ \"acc_stderr\": 0.02458002892148101,\n \"acc_norm\": 0.24838709677419354,\n\
\ \"acc_norm_stderr\": 0.02458002892148101\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.2561576354679803,\n \"acc_stderr\": 0.0307127300709826,\n\
\ \"acc_norm\": 0.2561576354679803,\n \"acc_norm_stderr\": 0.0307127300709826\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\"\
: 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.296969696969697,\n \"acc_stderr\": 0.035679697722680474,\n\
\ \"acc_norm\": 0.296969696969697,\n \"acc_norm_stderr\": 0.035679697722680474\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.25252525252525254,\n \"acc_stderr\": 0.030954055470365904,\n \"\
acc_norm\": 0.25252525252525254,\n \"acc_norm_stderr\": 0.030954055470365904\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.22279792746113988,\n \"acc_stderr\": 0.030031147977641545,\n\
\ \"acc_norm\": 0.22279792746113988,\n \"acc_norm_stderr\": 0.030031147977641545\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.23076923076923078,\n \"acc_stderr\": 0.02136202772522271,\n\
\ \"acc_norm\": 0.23076923076923078,\n \"acc_norm_stderr\": 0.02136202772522271\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.24814814814814815,\n \"acc_stderr\": 0.0263357394040558,\n \
\ \"acc_norm\": 0.24814814814814815,\n \"acc_norm_stderr\": 0.0263357394040558\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.21428571428571427,\n \"acc_stderr\": 0.026653531596715477,\n\
\ \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.026653531596715477\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.26490066225165565,\n \"acc_stderr\": 0.036030385453603854,\n \"\
acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.036030385453603854\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.23119266055045873,\n \"acc_stderr\": 0.01807575024163315,\n \"\
acc_norm\": 0.23119266055045873,\n \"acc_norm_stderr\": 0.01807575024163315\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.19444444444444445,\n \"acc_stderr\": 0.026991454502036737,\n \"\
acc_norm\": 0.19444444444444445,\n \"acc_norm_stderr\": 0.026991454502036737\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.20098039215686275,\n \"acc_stderr\": 0.02812597226565437,\n \"\
acc_norm\": 0.20098039215686275,\n \"acc_norm_stderr\": 0.02812597226565437\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.26582278481012656,\n \"acc_stderr\": 0.02875679962965835,\n \
\ \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.02875679962965835\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.33183856502242154,\n\
\ \"acc_stderr\": 0.03160295143776679,\n \"acc_norm\": 0.33183856502242154,\n\
\ \"acc_norm_stderr\": 0.03160295143776679\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.1984732824427481,\n \"acc_stderr\": 0.034981493854624714,\n\
\ \"acc_norm\": 0.1984732824427481,\n \"acc_norm_stderr\": 0.034981493854624714\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.2809917355371901,\n \"acc_stderr\": 0.04103203830514512,\n \"\
acc_norm\": 0.2809917355371901,\n \"acc_norm_stderr\": 0.04103203830514512\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\
\ \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.25925925925925924,\n\
\ \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.2883435582822086,\n \"acc_stderr\": 0.035590395316173425,\n\
\ \"acc_norm\": 0.2883435582822086,\n \"acc_norm_stderr\": 0.035590395316173425\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n\
\ \"acc_stderr\": 0.04246624336697624,\n \"acc_norm\": 0.2767857142857143,\n\
\ \"acc_norm_stderr\": 0.04246624336697624\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384495,\n\
\ \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384495\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23076923076923078,\n\
\ \"acc_stderr\": 0.027601921381417604,\n \"acc_norm\": 0.23076923076923078,\n\
\ \"acc_norm_stderr\": 0.027601921381417604\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \
\ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n\
\ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2669220945083014,\n\
\ \"acc_stderr\": 0.015818450894777562,\n \"acc_norm\": 0.2669220945083014,\n\
\ \"acc_norm_stderr\": 0.015818450894777562\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.2658959537572254,\n \"acc_stderr\": 0.023786203255508277,\n\
\ \"acc_norm\": 0.2658959537572254,\n \"acc_norm_stderr\": 0.023786203255508277\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25921787709497207,\n\
\ \"acc_stderr\": 0.014655780837497724,\n \"acc_norm\": 0.25921787709497207,\n\
\ \"acc_norm_stderr\": 0.014655780837497724\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.025829163272757482,\n\
\ \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.025829163272757482\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3022508038585209,\n\
\ \"acc_stderr\": 0.02608270069539965,\n \"acc_norm\": 0.3022508038585209,\n\
\ \"acc_norm_stderr\": 0.02608270069539965\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02438366553103545,\n\
\ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02438366553103545\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432414,\n \
\ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432414\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2379400260756193,\n\
\ \"acc_stderr\": 0.010875700787694233,\n \"acc_norm\": 0.2379400260756193,\n\
\ \"acc_norm_stderr\": 0.010875700787694233\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.22794117647058823,\n \"acc_stderr\": 0.025483081468029804,\n\
\ \"acc_norm\": 0.22794117647058823,\n \"acc_norm_stderr\": 0.025483081468029804\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.25326797385620914,\n \"acc_stderr\": 0.017593486895366835,\n \
\ \"acc_norm\": 0.25326797385620914,\n \"acc_norm_stderr\": 0.017593486895366835\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2545454545454545,\n\
\ \"acc_stderr\": 0.04172343038705382,\n \"acc_norm\": 0.2545454545454545,\n\
\ \"acc_norm_stderr\": 0.04172343038705382\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.18775510204081633,\n \"acc_stderr\": 0.025000256039546212,\n\
\ \"acc_norm\": 0.18775510204081633,\n \"acc_norm_stderr\": 0.025000256039546212\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.20398009950248755,\n\
\ \"acc_stderr\": 0.02849317624532608,\n \"acc_norm\": 0.20398009950248755,\n\
\ \"acc_norm_stderr\": 0.02849317624532608\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3132530120481928,\n\
\ \"acc_stderr\": 0.036108050180310235,\n \"acc_norm\": 0.3132530120481928,\n\
\ \"acc_norm_stderr\": 0.036108050180310235\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.25146198830409355,\n \"acc_stderr\": 0.033275044238468436,\n\
\ \"acc_norm\": 0.25146198830409355,\n \"acc_norm_stderr\": 0.033275044238468436\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23745410036719705,\n\
\ \"mc1_stderr\": 0.014896277441041836,\n \"mc2\": 0.3899188336745711,\n\
\ \"mc2_stderr\": 0.014581462986356814\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.5730071033938438,\n \"acc_stderr\": 0.013901878072575055\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.0017827181208053692,\n \
\ \"em_stderr\": 0.0004320097346039005,\n \"f1\": 0.03894714765100676,\n\
\ \"f1_stderr\": 0.0011174860838397392\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.000758150113722517,\n \"acc_stderr\": 0.0007581501137225202\n\
\ }\n}\n```"
repo_url: https://huggingface.co/nnpy/Nape-0
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|arc:challenge|25_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|drop|3_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|gsm8k|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hellaswag|10_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T20-32-43.776654.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T20-32-43.776654.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- '**/details_harness|winogrande|5_2023-11-18T20-32-43.776654.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T20-32-43.776654.parquet'
- config_name: results
data_files:
- split: 2023_11_18T20_32_43.776654
path:
- results_2023-11-18T20-32-43.776654.parquet
- split: latest
path:
- results_2023-11-18T20-32-43.776654.parquet
---
# Dataset Card for Evaluation run of nnpy/Nape-0
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/nnpy/Nape-0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [nnpy/Nape-0](https://huggingface.co/nnpy/Nape-0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_nnpy__Nape-0_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T20:32:43.776654](https://huggingface.co/datasets/open-llm-leaderboard/details_nnpy__Nape-0_public/blob/main/results_2023-11-18T20-32-43.776654.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.2543128094432495,
"acc_stderr": 0.03062072612679358,
"acc_norm": 0.2558387672983551,
"acc_norm_stderr": 0.031412385111215566,
"mc1": 0.23745410036719705,
"mc1_stderr": 0.014896277441041836,
"mc2": 0.3899188336745711,
"mc2_stderr": 0.014581462986356814,
"em": 0.0017827181208053692,
"em_stderr": 0.0004320097346039005,
"f1": 0.03894714765100676,
"f1_stderr": 0.0011174860838397392
},
"harness|arc:challenge|25": {
"acc": 0.31143344709897613,
"acc_stderr": 0.013532472099850939,
"acc_norm": 0.3267918088737201,
"acc_norm_stderr": 0.013706665975587331
},
"harness|hellaswag|10": {
"acc": 0.4470225054769966,
"acc_stderr": 0.004961693567208816,
"acc_norm": 0.5868352917745469,
"acc_norm_stderr": 0.004913955705080124
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.04094376269996793,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.04094376269996793
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.23684210526315788,
"acc_stderr": 0.034597776068105365,
"acc_norm": 0.23684210526315788,
"acc_norm_stderr": 0.034597776068105365
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.2,
"acc_stderr": 0.040201512610368445,
"acc_norm": 0.2,
"acc_norm_stderr": 0.040201512610368445
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.26037735849056604,
"acc_stderr": 0.027008766090708097,
"acc_norm": 0.26037735849056604,
"acc_norm_stderr": 0.027008766090708097
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2152777777777778,
"acc_stderr": 0.03437079344106132,
"acc_norm": 0.2152777777777778,
"acc_norm_stderr": 0.03437079344106132
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.18,
"acc_stderr": 0.038612291966536955,
"acc_norm": 0.18,
"acc_norm_stderr": 0.038612291966536955
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.16,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.16,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.23121387283236994,
"acc_stderr": 0.03214737302029471,
"acc_norm": 0.23121387283236994,
"acc_norm_stderr": 0.03214737302029471
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364396,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364396
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909282
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.251063829787234,
"acc_stderr": 0.028346963777162452,
"acc_norm": 0.251063829787234,
"acc_norm_stderr": 0.028346963777162452
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.041857744240220575,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.041857744240220575
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.2896551724137931,
"acc_stderr": 0.03780019230438014,
"acc_norm": 0.2896551724137931,
"acc_norm_stderr": 0.03780019230438014
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2671957671957672,
"acc_stderr": 0.02278967314577656,
"acc_norm": 0.2671957671957672,
"acc_norm_stderr": 0.02278967314577656
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.14285714285714285,
"acc_stderr": 0.0312984318574381,
"acc_norm": 0.14285714285714285,
"acc_norm_stderr": 0.0312984318574381
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.24838709677419354,
"acc_stderr": 0.02458002892148101,
"acc_norm": 0.24838709677419354,
"acc_norm_stderr": 0.02458002892148101
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.2561576354679803,
"acc_stderr": 0.0307127300709826,
"acc_norm": 0.2561576354679803,
"acc_norm_stderr": 0.0307127300709826
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.296969696969697,
"acc_stderr": 0.035679697722680474,
"acc_norm": 0.296969696969697,
"acc_norm_stderr": 0.035679697722680474
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.25252525252525254,
"acc_stderr": 0.030954055470365904,
"acc_norm": 0.25252525252525254,
"acc_norm_stderr": 0.030954055470365904
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.22279792746113988,
"acc_stderr": 0.030031147977641545,
"acc_norm": 0.22279792746113988,
"acc_norm_stderr": 0.030031147977641545
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.23076923076923078,
"acc_stderr": 0.02136202772522271,
"acc_norm": 0.23076923076923078,
"acc_norm_stderr": 0.02136202772522271
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.24814814814814815,
"acc_stderr": 0.0263357394040558,
"acc_norm": 0.24814814814814815,
"acc_norm_stderr": 0.0263357394040558
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.026653531596715477,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.026653531596715477
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.26490066225165565,
"acc_stderr": 0.036030385453603854,
"acc_norm": 0.26490066225165565,
"acc_norm_stderr": 0.036030385453603854
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.23119266055045873,
"acc_stderr": 0.01807575024163315,
"acc_norm": 0.23119266055045873,
"acc_norm_stderr": 0.01807575024163315
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.19444444444444445,
"acc_stderr": 0.026991454502036737,
"acc_norm": 0.19444444444444445,
"acc_norm_stderr": 0.026991454502036737
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.20098039215686275,
"acc_stderr": 0.02812597226565437,
"acc_norm": 0.20098039215686275,
"acc_norm_stderr": 0.02812597226565437
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.26582278481012656,
"acc_stderr": 0.02875679962965835,
"acc_norm": 0.26582278481012656,
"acc_norm_stderr": 0.02875679962965835
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.33183856502242154,
"acc_stderr": 0.03160295143776679,
"acc_norm": 0.33183856502242154,
"acc_norm_stderr": 0.03160295143776679
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.1984732824427481,
"acc_stderr": 0.034981493854624714,
"acc_norm": 0.1984732824427481,
"acc_norm_stderr": 0.034981493854624714
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.2809917355371901,
"acc_stderr": 0.04103203830514512,
"acc_norm": 0.2809917355371901,
"acc_norm_stderr": 0.04103203830514512
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.04236511258094633,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.04236511258094633
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.2883435582822086,
"acc_stderr": 0.035590395316173425,
"acc_norm": 0.2883435582822086,
"acc_norm_stderr": 0.035590395316173425
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.2767857142857143,
"acc_stderr": 0.04246624336697624,
"acc_norm": 0.2767857142857143,
"acc_norm_stderr": 0.04246624336697624
},
"harness|hendrycksTest-management|5": {
"acc": 0.24271844660194175,
"acc_stderr": 0.04245022486384495,
"acc_norm": 0.24271844660194175,
"acc_norm_stderr": 0.04245022486384495
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.23076923076923078,
"acc_stderr": 0.027601921381417604,
"acc_norm": 0.23076923076923078,
"acc_norm_stderr": 0.027601921381417604
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.2669220945083014,
"acc_stderr": 0.015818450894777562,
"acc_norm": 0.2669220945083014,
"acc_norm_stderr": 0.015818450894777562
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.023786203255508277,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.023786203255508277
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.25921787709497207,
"acc_stderr": 0.014655780837497724,
"acc_norm": 0.25921787709497207,
"acc_norm_stderr": 0.014655780837497724
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.025829163272757482,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.025829163272757482
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.3022508038585209,
"acc_stderr": 0.02608270069539965,
"acc_norm": 0.3022508038585209,
"acc_norm_stderr": 0.02608270069539965
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.02438366553103545,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.02438366553103545
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.23404255319148937,
"acc_stderr": 0.025257861359432414,
"acc_norm": 0.23404255319148937,
"acc_norm_stderr": 0.025257861359432414
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2379400260756193,
"acc_stderr": 0.010875700787694233,
"acc_norm": 0.2379400260756193,
"acc_norm_stderr": 0.010875700787694233
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.22794117647058823,
"acc_stderr": 0.025483081468029804,
"acc_norm": 0.22794117647058823,
"acc_norm_stderr": 0.025483081468029804
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.25326797385620914,
"acc_stderr": 0.017593486895366835,
"acc_norm": 0.25326797385620914,
"acc_norm_stderr": 0.017593486895366835
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.2545454545454545,
"acc_stderr": 0.04172343038705382,
"acc_norm": 0.2545454545454545,
"acc_norm_stderr": 0.04172343038705382
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.18775510204081633,
"acc_stderr": 0.025000256039546212,
"acc_norm": 0.18775510204081633,
"acc_norm_stderr": 0.025000256039546212
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.20398009950248755,
"acc_stderr": 0.02849317624532608,
"acc_norm": 0.20398009950248755,
"acc_norm_stderr": 0.02849317624532608
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-virology|5": {
"acc": 0.3132530120481928,
"acc_stderr": 0.036108050180310235,
"acc_norm": 0.3132530120481928,
"acc_norm_stderr": 0.036108050180310235
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.25146198830409355,
"acc_stderr": 0.033275044238468436,
"acc_norm": 0.25146198830409355,
"acc_norm_stderr": 0.033275044238468436
},
"harness|truthfulqa:mc|0": {
"mc1": 0.23745410036719705,
"mc1_stderr": 0.014896277441041836,
"mc2": 0.3899188336745711,
"mc2_stderr": 0.014581462986356814
},
"harness|winogrande|5": {
"acc": 0.5730071033938438,
"acc_stderr": 0.013901878072575055
},
"harness|drop|3": {
"em": 0.0017827181208053692,
"em_stderr": 0.0004320097346039005,
"f1": 0.03894714765100676,
"f1_stderr": 0.0011174860838397392
},
"harness|gsm8k|5": {
"acc": 0.000758150113722517,
"acc_stderr": 0.0007581501137225202
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7458473443984985,
-0.8494880795478821,
0.24677877128124237,
0.23356056213378906,
-0.1550680547952652,
-0.044946394860744476,
0.05231887847185135,
-0.2336367964744568,
0.6273733973503113,
-0.03208882734179497,
-0.47408175468444824,
-0.6683633923530579,
-0.43474623560905457,
0.26797190308... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Sijuade/diffusion_latent_test | Sijuade | 2023-11-18T21:13:24Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-18T21:13:24Z | 2023-11-18T20:40:34.000Z | 2023-11-18T20:40:34 | ---
license: mit
dataset_info:
features:
- name: latent
sequence:
sequence:
sequence:
sequence: float64
- name: noised_latents
sequence:
sequence:
sequence:
sequence: float64
- name: noise
sequence:
sequence:
sequence:
sequence: float64
- name: timesteps
dtype: int64
- name: label
dtype: int64
splits:
- name: train
num_bytes: 2543200
num_examples: 100
download_size: 3122286
dataset_size: 2543200
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Arutairu/Himeko | Arutairu | 2023-11-19T14:57:19Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T14:57:19Z | 2023-11-18T20:53:19.000Z | 2023-11-18T20:53:19 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
textminr/ner-test2 | textminr | 2023-11-18T21:05:13Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:05:13Z | 2023-11-18T21:03:26.000Z | 2023-11-18T21:03:26 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-6.7b-v2-instruct_public | open-llm-leaderboard | 2023-11-18T21:07:33Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:07:33Z | 2023-11-18T21:06:43.000Z | 2023-11-18T21:06:43 | ---
pretty_name: Evaluation run of AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-6.7b-v2-instruct_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T21:04:21.939404](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-6.7b-v2-instruct_public/blob/main/results_2023-11-18T21-04-21.939404.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.32058974654497724,\n\
\ \"acc_stderr\": 0.03287256745618845,\n \"acc_norm\": 0.3233939935906761,\n\
\ \"acc_norm_stderr\": 0.03364411678813401,\n \"mc1\": 0.26193390452876375,\n\
\ \"mc1_stderr\": 0.015392118805015023,\n \"mc2\": 0.4032485125499964,\n\
\ \"mc2_stderr\": 0.014292284301112663,\n \"em\": 0.22766359060402686,\n\
\ \"em_stderr\": 0.004294273453162853,\n \"f1\": 0.266680998322148,\n\
\ \"f1_stderr\": 0.00428696034436648\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.3575085324232082,\n \"acc_stderr\": 0.014005494275916576,\n\
\ \"acc_norm\": 0.40784982935153585,\n \"acc_norm_stderr\": 0.014361097288449707\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5046803425612428,\n\
\ \"acc_stderr\": 0.004989562798280523,\n \"acc_norm\": 0.6776538538139812,\n\
\ \"acc_norm_stderr\": 0.004664195159393912\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.362962962962963,\n\
\ \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.362962962962963,\n\
\ \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.32894736842105265,\n \"acc_stderr\": 0.03823428969926604,\n\
\ \"acc_norm\": 0.32894736842105265,\n \"acc_norm_stderr\": 0.03823428969926604\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.35,\n\
\ \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \
\ \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.33962264150943394,\n \"acc_stderr\": 0.029146904747798335,\n\
\ \"acc_norm\": 0.33962264150943394,\n \"acc_norm_stderr\": 0.029146904747798335\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3680555555555556,\n\
\ \"acc_stderr\": 0.04032999053960718,\n \"acc_norm\": 0.3680555555555556,\n\
\ \"acc_norm_stderr\": 0.04032999053960718\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\"\
: 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3352601156069364,\n\
\ \"acc_stderr\": 0.03599586301247078,\n \"acc_norm\": 0.3352601156069364,\n\
\ \"acc_norm_stderr\": 0.03599586301247078\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.0433643270799318,\n\
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.0433643270799318\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n\
\ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.3191489361702128,\n \"acc_stderr\": 0.030472973363380045,\n\
\ \"acc_norm\": 0.3191489361702128,\n \"acc_norm_stderr\": 0.030472973363380045\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n\
\ \"acc_stderr\": 0.040493392977481404,\n \"acc_norm\": 0.24561403508771928,\n\
\ \"acc_norm_stderr\": 0.040493392977481404\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.27586206896551724,\n \"acc_stderr\": 0.03724563619774634,\n\
\ \"acc_norm\": 0.27586206896551724,\n \"acc_norm_stderr\": 0.03724563619774634\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.29894179894179895,\n \"acc_stderr\": 0.023577604791655805,\n \"\
acc_norm\": 0.29894179894179895,\n \"acc_norm_stderr\": 0.023577604791655805\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n\
\ \"acc_stderr\": 0.039325376803928704,\n \"acc_norm\": 0.2619047619047619,\n\
\ \"acc_norm_stderr\": 0.039325376803928704\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \
\ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3161290322580645,\n\
\ \"acc_stderr\": 0.02645087448904276,\n \"acc_norm\": 0.3161290322580645,\n\
\ \"acc_norm_stderr\": 0.02645087448904276\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.1921182266009852,\n \"acc_stderr\": 0.027719315709614775,\n\
\ \"acc_norm\": 0.1921182266009852,\n \"acc_norm_stderr\": 0.027719315709614775\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\
: 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.3393939393939394,\n \"acc_stderr\": 0.03697442205031596,\n\
\ \"acc_norm\": 0.3393939393939394,\n \"acc_norm_stderr\": 0.03697442205031596\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.31313131313131315,\n \"acc_stderr\": 0.03304205087813653,\n \"\
acc_norm\": 0.31313131313131315,\n \"acc_norm_stderr\": 0.03304205087813653\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.35233160621761656,\n \"acc_stderr\": 0.034474782864143565,\n\
\ \"acc_norm\": 0.35233160621761656,\n \"acc_norm_stderr\": 0.034474782864143565\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.2564102564102564,\n \"acc_stderr\": 0.02213908110397155,\n \
\ \"acc_norm\": 0.2564102564102564,\n \"acc_norm_stderr\": 0.02213908110397155\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712163,\n \
\ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712163\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.2689075630252101,\n \"acc_stderr\": 0.028801392193631276,\n\
\ \"acc_norm\": 0.2689075630252101,\n \"acc_norm_stderr\": 0.028801392193631276\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"\
acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.42385321100917434,\n \"acc_stderr\": 0.021187263209087516,\n \"\
acc_norm\": 0.42385321100917434,\n \"acc_norm_stderr\": 0.021187263209087516\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.2037037037037037,\n \"acc_stderr\": 0.027467401804058017,\n \"\
acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.027467401804058017\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.3088235294117647,\n \"acc_stderr\": 0.03242661719827218,\n \"\
acc_norm\": 0.3088235294117647,\n \"acc_norm_stderr\": 0.03242661719827218\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.4050632911392405,\n \"acc_stderr\": 0.03195514741370673,\n \
\ \"acc_norm\": 0.4050632911392405,\n \"acc_norm_stderr\": 0.03195514741370673\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.38565022421524664,\n\
\ \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.38565022421524664,\n\
\ \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.40458015267175573,\n \"acc_stderr\": 0.043046937953806645,\n\
\ \"acc_norm\": 0.40458015267175573,\n \"acc_norm_stderr\": 0.043046937953806645\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.4214876033057851,\n \"acc_stderr\": 0.045077322787750944,\n \"\
acc_norm\": 0.4214876033057851,\n \"acc_norm_stderr\": 0.045077322787750944\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.35185185185185186,\n\
\ \"acc_stderr\": 0.04616631111801714,\n \"acc_norm\": 0.35185185185185186,\n\
\ \"acc_norm_stderr\": 0.04616631111801714\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.034878251684978906,\n\
\ \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.034878251684978906\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n\
\ \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n\
\ \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.34951456310679613,\n \"acc_stderr\": 0.047211885060971716,\n\
\ \"acc_norm\": 0.34951456310679613,\n \"acc_norm_stderr\": 0.047211885060971716\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.43162393162393164,\n\
\ \"acc_stderr\": 0.0324483553531149,\n \"acc_norm\": 0.43162393162393164,\n\
\ \"acc_norm_stderr\": 0.0324483553531149\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.39080459770114945,\n\
\ \"acc_stderr\": 0.01744836606706253,\n \"acc_norm\": 0.39080459770114945,\n\
\ \"acc_norm_stderr\": 0.01744836606706253\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.315028901734104,\n \"acc_stderr\": 0.025009313790069713,\n\
\ \"acc_norm\": 0.315028901734104,\n \"acc_norm_stderr\": 0.025009313790069713\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\
\ \"acc_stderr\": 0.01433352205921789,\n \"acc_norm\": 0.2424581005586592,\n\
\ \"acc_norm_stderr\": 0.01433352205921789\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.34967320261437906,\n \"acc_stderr\": 0.027305308076274702,\n\
\ \"acc_norm\": 0.34967320261437906,\n \"acc_norm_stderr\": 0.027305308076274702\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.28938906752411575,\n\
\ \"acc_stderr\": 0.02575586592263294,\n \"acc_norm\": 0.28938906752411575,\n\
\ \"acc_norm_stderr\": 0.02575586592263294\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.32407407407407407,\n \"acc_stderr\": 0.026041766202717167,\n\
\ \"acc_norm\": 0.32407407407407407,\n \"acc_norm_stderr\": 0.026041766202717167\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.3262411347517731,\n \"acc_stderr\": 0.027968453043563168,\n \
\ \"acc_norm\": 0.3262411347517731,\n \"acc_norm_stderr\": 0.027968453043563168\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.273142112125163,\n\
\ \"acc_stderr\": 0.01138015056783041,\n \"acc_norm\": 0.273142112125163,\n\
\ \"acc_norm_stderr\": 0.01138015056783041\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.375,\n \"acc_stderr\": 0.029408372932278746,\n \
\ \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.029408372932278746\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.30392156862745096,\n \"acc_stderr\": 0.018607552131279834,\n \
\ \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.018607552131279834\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.36363636363636365,\n\
\ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.36363636363636365,\n\
\ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.22448979591836735,\n \"acc_stderr\": 0.026711430555538408,\n\
\ \"acc_norm\": 0.22448979591836735,\n \"acc_norm_stderr\": 0.026711430555538408\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.3482587064676617,\n\
\ \"acc_stderr\": 0.033687874661154596,\n \"acc_norm\": 0.3482587064676617,\n\
\ \"acc_norm_stderr\": 0.033687874661154596\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.30120481927710846,\n\
\ \"acc_stderr\": 0.0357160923005348,\n \"acc_norm\": 0.30120481927710846,\n\
\ \"acc_norm_stderr\": 0.0357160923005348\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.38011695906432746,\n \"acc_stderr\": 0.037229657413855394,\n\
\ \"acc_norm\": 0.38011695906432746,\n \"acc_norm_stderr\": 0.037229657413855394\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26193390452876375,\n\
\ \"mc1_stderr\": 0.015392118805015023,\n \"mc2\": 0.4032485125499964,\n\
\ \"mc2_stderr\": 0.014292284301112663\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6353591160220995,\n \"acc_stderr\": 0.013527746622429844\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.22766359060402686,\n \
\ \"em_stderr\": 0.004294273453162853,\n \"f1\": 0.266680998322148,\n \
\ \"f1_stderr\": 0.00428696034436648\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.06368460955269144,\n \"acc_stderr\": 0.006726213078805701\n\
\ }\n}\n```"
repo_url: https://huggingface.co/AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|drop|3_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-04-21.939404.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-04-21.939404.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- '**/details_harness|winogrande|5_2023-11-18T21-04-21.939404.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T21-04-21.939404.parquet'
- config_name: results
data_files:
- split: 2023_11_18T21_04_21.939404
path:
- results_2023-11-18T21-04-21.939404.parquet
- split: latest
path:
- results_2023-11-18T21-04-21.939404.parquet
---
# Dataset Card for Evaluation run of AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-6.7b-v2-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-6.7b-v2-instruct_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T21:04:21.939404](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-6.7b-v2-instruct_public/blob/main/results_2023-11-18T21-04-21.939404.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.32058974654497724,
"acc_stderr": 0.03287256745618845,
"acc_norm": 0.3233939935906761,
"acc_norm_stderr": 0.03364411678813401,
"mc1": 0.26193390452876375,
"mc1_stderr": 0.015392118805015023,
"mc2": 0.4032485125499964,
"mc2_stderr": 0.014292284301112663,
"em": 0.22766359060402686,
"em_stderr": 0.004294273453162853,
"f1": 0.266680998322148,
"f1_stderr": 0.00428696034436648
},
"harness|arc:challenge|25": {
"acc": 0.3575085324232082,
"acc_stderr": 0.014005494275916576,
"acc_norm": 0.40784982935153585,
"acc_norm_stderr": 0.014361097288449707
},
"harness|hellaswag|10": {
"acc": 0.5046803425612428,
"acc_stderr": 0.004989562798280523,
"acc_norm": 0.6776538538139812,
"acc_norm_stderr": 0.004664195159393912
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.362962962962963,
"acc_stderr": 0.04153948404742398,
"acc_norm": 0.362962962962963,
"acc_norm_stderr": 0.04153948404742398
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.32894736842105265,
"acc_stderr": 0.03823428969926604,
"acc_norm": 0.32894736842105265,
"acc_norm_stderr": 0.03823428969926604
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.33962264150943394,
"acc_stderr": 0.029146904747798335,
"acc_norm": 0.33962264150943394,
"acc_norm_stderr": 0.029146904747798335
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.3680555555555556,
"acc_stderr": 0.04032999053960718,
"acc_norm": 0.3680555555555556,
"acc_norm_stderr": 0.04032999053960718
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847415,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847415
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.3352601156069364,
"acc_stderr": 0.03599586301247078,
"acc_norm": 0.3352601156069364,
"acc_norm_stderr": 0.03599586301247078
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.0433643270799318,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.0433643270799318
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.3191489361702128,
"acc_stderr": 0.030472973363380045,
"acc_norm": 0.3191489361702128,
"acc_norm_stderr": 0.030472973363380045
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.24561403508771928,
"acc_stderr": 0.040493392977481404,
"acc_norm": 0.24561403508771928,
"acc_norm_stderr": 0.040493392977481404
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.27586206896551724,
"acc_stderr": 0.03724563619774634,
"acc_norm": 0.27586206896551724,
"acc_norm_stderr": 0.03724563619774634
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.29894179894179895,
"acc_stderr": 0.023577604791655805,
"acc_norm": 0.29894179894179895,
"acc_norm_stderr": 0.023577604791655805
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2619047619047619,
"acc_stderr": 0.039325376803928704,
"acc_norm": 0.2619047619047619,
"acc_norm_stderr": 0.039325376803928704
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.3161290322580645,
"acc_stderr": 0.02645087448904276,
"acc_norm": 0.3161290322580645,
"acc_norm_stderr": 0.02645087448904276
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.1921182266009852,
"acc_stderr": 0.027719315709614775,
"acc_norm": 0.1921182266009852,
"acc_norm_stderr": 0.027719315709614775
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.25,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.25,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.3393939393939394,
"acc_stderr": 0.03697442205031596,
"acc_norm": 0.3393939393939394,
"acc_norm_stderr": 0.03697442205031596
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.31313131313131315,
"acc_stderr": 0.03304205087813653,
"acc_norm": 0.31313131313131315,
"acc_norm_stderr": 0.03304205087813653
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.35233160621761656,
"acc_stderr": 0.034474782864143565,
"acc_norm": 0.35233160621761656,
"acc_norm_stderr": 0.034474782864143565
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.2564102564102564,
"acc_stderr": 0.02213908110397155,
"acc_norm": 0.2564102564102564,
"acc_norm_stderr": 0.02213908110397155
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.026719240783712163,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.026719240783712163
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.2689075630252101,
"acc_stderr": 0.028801392193631276,
"acc_norm": 0.2689075630252101,
"acc_norm_stderr": 0.028801392193631276
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008936,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008936
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.42385321100917434,
"acc_stderr": 0.021187263209087516,
"acc_norm": 0.42385321100917434,
"acc_norm_stderr": 0.021187263209087516
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.2037037037037037,
"acc_stderr": 0.027467401804058017,
"acc_norm": 0.2037037037037037,
"acc_norm_stderr": 0.027467401804058017
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.3088235294117647,
"acc_stderr": 0.03242661719827218,
"acc_norm": 0.3088235294117647,
"acc_norm_stderr": 0.03242661719827218
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.4050632911392405,
"acc_stderr": 0.03195514741370673,
"acc_norm": 0.4050632911392405,
"acc_norm_stderr": 0.03195514741370673
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.38565022421524664,
"acc_stderr": 0.03266842214289201,
"acc_norm": 0.38565022421524664,
"acc_norm_stderr": 0.03266842214289201
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.40458015267175573,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.40458015267175573,
"acc_norm_stderr": 0.043046937953806645
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.4214876033057851,
"acc_stderr": 0.045077322787750944,
"acc_norm": 0.4214876033057851,
"acc_norm_stderr": 0.045077322787750944
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.35185185185185186,
"acc_stderr": 0.04616631111801714,
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.04616631111801714
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.26993865030674846,
"acc_stderr": 0.034878251684978906,
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.034878251684978906
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.04547960999764376,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.04547960999764376
},
"harness|hendrycksTest-management|5": {
"acc": 0.34951456310679613,
"acc_stderr": 0.047211885060971716,
"acc_norm": 0.34951456310679613,
"acc_norm_stderr": 0.047211885060971716
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.43162393162393164,
"acc_stderr": 0.0324483553531149,
"acc_norm": 0.43162393162393164,
"acc_norm_stderr": 0.0324483553531149
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.39080459770114945,
"acc_stderr": 0.01744836606706253,
"acc_norm": 0.39080459770114945,
"acc_norm_stderr": 0.01744836606706253
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.315028901734104,
"acc_stderr": 0.025009313790069713,
"acc_norm": 0.315028901734104,
"acc_norm_stderr": 0.025009313790069713
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.01433352205921789,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.01433352205921789
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.34967320261437906,
"acc_stderr": 0.027305308076274702,
"acc_norm": 0.34967320261437906,
"acc_norm_stderr": 0.027305308076274702
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.28938906752411575,
"acc_stderr": 0.02575586592263294,
"acc_norm": 0.28938906752411575,
"acc_norm_stderr": 0.02575586592263294
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.32407407407407407,
"acc_stderr": 0.026041766202717167,
"acc_norm": 0.32407407407407407,
"acc_norm_stderr": 0.026041766202717167
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.3262411347517731,
"acc_stderr": 0.027968453043563168,
"acc_norm": 0.3262411347517731,
"acc_norm_stderr": 0.027968453043563168
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.273142112125163,
"acc_stderr": 0.01138015056783041,
"acc_norm": 0.273142112125163,
"acc_norm_stderr": 0.01138015056783041
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.375,
"acc_stderr": 0.029408372932278746,
"acc_norm": 0.375,
"acc_norm_stderr": 0.029408372932278746
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.30392156862745096,
"acc_stderr": 0.018607552131279834,
"acc_norm": 0.30392156862745096,
"acc_norm_stderr": 0.018607552131279834
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.36363636363636365,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.36363636363636365,
"acc_norm_stderr": 0.04607582090719976
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.22448979591836735,
"acc_stderr": 0.026711430555538408,
"acc_norm": 0.22448979591836735,
"acc_norm_stderr": 0.026711430555538408
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.3482587064676617,
"acc_stderr": 0.033687874661154596,
"acc_norm": 0.3482587064676617,
"acc_norm_stderr": 0.033687874661154596
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-virology|5": {
"acc": 0.30120481927710846,
"acc_stderr": 0.0357160923005348,
"acc_norm": 0.30120481927710846,
"acc_norm_stderr": 0.0357160923005348
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.38011695906432746,
"acc_stderr": 0.037229657413855394,
"acc_norm": 0.38011695906432746,
"acc_norm_stderr": 0.037229657413855394
},
"harness|truthfulqa:mc|0": {
"mc1": 0.26193390452876375,
"mc1_stderr": 0.015392118805015023,
"mc2": 0.4032485125499964,
"mc2_stderr": 0.014292284301112663
},
"harness|winogrande|5": {
"acc": 0.6353591160220995,
"acc_stderr": 0.013527746622429844
},
"harness|drop|3": {
"em": 0.22766359060402686,
"em_stderr": 0.004294273453162853,
"f1": 0.266680998322148,
"f1_stderr": 0.00428696034436648
},
"harness|gsm8k|5": {
"acc": 0.06368460955269144,
"acc_stderr": 0.006726213078805701
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7107391357421875,
-0.840462327003479,
0.30288174748420715,
0.19927100837230682,
-0.18521398305892944,
-0.06302601099014282,
0.02096422389149666,
-0.247011199593544,
0.4957811236381531,
-0.06133496016263962,
-0.4696747362613678,
-0.6918721795082092,
-0.43452268838882446,
0.21732287108898... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gilsonk12/mundosdescolhecido | gilsonk12 | 2023-11-18T21:13:57Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T21:13:57Z | 2023-11-18T21:09:13.000Z | 2023-11-18T21:09:13 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
KaiserML/Techie_Raw_PDF | KaiserML | 2023-11-18T23:40:00Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T23:40:00Z | 2023-11-18T21:09:39.000Z | 2023-11-18T21:09:39 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
sadaty515/asdadasdgsfasd | sadaty515 | 2023-11-18T21:12:32Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:12:32Z | 2023-11-18T21:12:32.000Z | 2023-11-18T21:12:32 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gilsonk12/mundodescolhecido | gilsonk12 | 2023-11-18T21:25:12Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T21:25:12Z | 2023-11-18T21:24:33.000Z | 2023-11-18T21:24:33 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_vihangd__shearedplats-1.3b-v1_public | open-llm-leaderboard | 2023-11-18T21:30:55Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:30:55Z | 2023-11-18T21:30:07.000Z | 2023-11-18T21:30:07 | ---
pretty_name: Evaluation run of vihangd/shearedplats-1.3b-v1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [vihangd/shearedplats-1.3b-v1](https://huggingface.co/vihangd/shearedplats-1.3b-v1)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vihangd__shearedplats-1.3b-v1_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T21:27:03.574383](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__shearedplats-1.3b-v1_public/blob/main/results_2023-11-18T21-27-03.574383.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.253847590681609,\n\
\ \"acc_stderr\": 0.030523099331108815,\n \"acc_norm\": 0.25573186704882195,\n\
\ \"acc_norm_stderr\": 0.031292815233613276,\n \"mc1\": 0.204406364749082,\n\
\ \"mc1_stderr\": 0.014117174337432618,\n \"mc2\": 0.3392533208873607,\n\
\ \"mc2_stderr\": 0.014078645743359227,\n \"em\": 0.003355704697986577,\n\
\ \"em_stderr\": 0.0005922452850005238,\n \"f1\": 0.0555180369127516,\n\
\ \"f1_stderr\": 0.0013765753121727882\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.3174061433447099,\n \"acc_stderr\": 0.013602239088038173,\n\
\ \"acc_norm\": 0.35409556313993173,\n \"acc_norm_stderr\": 0.013975454122756557\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4705238000398327,\n\
\ \"acc_stderr\": 0.0049811031579404495,\n \"acc_norm\": 0.6274646484763992,\n\
\ \"acc_norm_stderr\": 0.004824917516374187\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n\
\ \"acc_stderr\": 0.03591444084196969,\n \"acc_norm\": 0.2222222222222222,\n\
\ \"acc_norm_stderr\": 0.03591444084196969\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.3223684210526316,\n \"acc_stderr\": 0.03803510248351585,\n\
\ \"acc_norm\": 0.3223684210526316,\n \"acc_norm_stderr\": 0.03803510248351585\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.28,\n\
\ \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \
\ \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.2528301886792453,\n \"acc_stderr\": 0.026749899771241238,\n\
\ \"acc_norm\": 0.2528301886792453,\n \"acc_norm_stderr\": 0.026749899771241238\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n\
\ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n\
\ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.15,\n \"acc_stderr\": 0.035887028128263714,\n \"acc_norm\"\
: 0.15,\n \"acc_norm_stderr\": 0.035887028128263714\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909281,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909281\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n\
\ \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n\
\ \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808778,\n\
\ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808778\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.27,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.27,\n\
\ \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.3276595744680851,\n \"acc_stderr\": 0.030683020843231004,\n\
\ \"acc_norm\": 0.3276595744680851,\n \"acc_norm_stderr\": 0.030683020843231004\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.21929824561403508,\n\
\ \"acc_stderr\": 0.03892431106518754,\n \"acc_norm\": 0.21929824561403508,\n\
\ \"acc_norm_stderr\": 0.03892431106518754\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n\
\ \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.25925925925925924,\n \"acc_stderr\": 0.022569897074918417,\n \"\
acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.022569897074918417\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15873015873015872,\n\
\ \"acc_stderr\": 0.03268454013011744,\n \"acc_norm\": 0.15873015873015872,\n\
\ \"acc_norm_stderr\": 0.03268454013011744\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.27419354838709675,\n \"acc_stderr\": 0.025378139970885196,\n \"\
acc_norm\": 0.27419354838709675,\n \"acc_norm_stderr\": 0.025378139970885196\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.270935960591133,\n \"acc_stderr\": 0.031270907132976984,\n \"\
acc_norm\": 0.270935960591133,\n \"acc_norm_stderr\": 0.031270907132976984\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\"\
: 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.034277431758165236,\n\
\ \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.034277431758165236\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.21717171717171718,\n \"acc_stderr\": 0.029376616484945633,\n \"\
acc_norm\": 0.21717171717171718,\n \"acc_norm_stderr\": 0.029376616484945633\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.029519282616817244,\n\
\ \"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.029519282616817244\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.24615384615384617,\n \"acc_stderr\": 0.021840866990423084,\n\
\ \"acc_norm\": 0.24615384615384617,\n \"acc_norm_stderr\": 0.021840866990423084\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114993,\n \
\ \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114993\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.23109243697478993,\n \"acc_stderr\": 0.027381406927868966,\n\
\ \"acc_norm\": 0.23109243697478993,\n \"acc_norm_stderr\": 0.027381406927868966\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"\
acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.23669724770642203,\n \"acc_stderr\": 0.01822407811729907,\n \"\
acc_norm\": 0.23669724770642203,\n \"acc_norm_stderr\": 0.01822407811729907\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.37037037037037035,\n \"acc_stderr\": 0.03293377139415191,\n \"\
acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.03293377139415191\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.2107843137254902,\n \"acc_stderr\": 0.028626547912437388,\n \"\
acc_norm\": 0.2107843137254902,\n \"acc_norm_stderr\": 0.028626547912437388\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.27848101265822783,\n \"acc_stderr\": 0.029178682304842548,\n \
\ \"acc_norm\": 0.27848101265822783,\n \"acc_norm_stderr\": 0.029178682304842548\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.336322869955157,\n\
\ \"acc_stderr\": 0.031708824268455,\n \"acc_norm\": 0.336322869955157,\n\
\ \"acc_norm_stderr\": 0.031708824268455\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n\
\ \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.38016528925619836,\n \"acc_stderr\": 0.04431324501968432,\n \"\
acc_norm\": 0.38016528925619836,\n \"acc_norm_stderr\": 0.04431324501968432\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n\
\ \"acc_stderr\": 0.04077494709252628,\n \"acc_norm\": 0.23148148148148148,\n\
\ \"acc_norm_stderr\": 0.04077494709252628\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.032910995786157686,\n\
\ \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.032910995786157686\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n\
\ \"acc_stderr\": 0.04007341809755805,\n \"acc_norm\": 0.23214285714285715,\n\
\ \"acc_norm_stderr\": 0.04007341809755805\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.23300970873786409,\n \"acc_stderr\": 0.041858325989283164,\n\
\ \"acc_norm\": 0.23300970873786409,\n \"acc_norm_stderr\": 0.041858325989283164\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23076923076923078,\n\
\ \"acc_stderr\": 0.027601921381417586,\n \"acc_norm\": 0.23076923076923078,\n\
\ \"acc_norm_stderr\": 0.027601921381417586\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \
\ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n\
\ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26947637292464877,\n\
\ \"acc_stderr\": 0.01586624307321505,\n \"acc_norm\": 0.26947637292464877,\n\
\ \"acc_norm_stderr\": 0.01586624307321505\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.21676300578034682,\n \"acc_stderr\": 0.02218347766841286,\n\
\ \"acc_norm\": 0.21676300578034682,\n \"acc_norm_stderr\": 0.02218347766841286\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25251396648044694,\n\
\ \"acc_stderr\": 0.014530330201468641,\n \"acc_norm\": 0.25251396648044694,\n\
\ \"acc_norm_stderr\": 0.014530330201468641\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.0239291555173513,\n\
\ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.0239291555173513\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.24437299035369775,\n\
\ \"acc_stderr\": 0.024406162094668893,\n \"acc_norm\": 0.24437299035369775,\n\
\ \"acc_norm_stderr\": 0.024406162094668893\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.02540719779889016,\n\
\ \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.02540719779889016\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.2730496453900709,\n \"acc_stderr\": 0.02657786094330785,\n \
\ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.02657786094330785\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24511082138200782,\n\
\ \"acc_stderr\": 0.010986307870045514,\n \"acc_norm\": 0.24511082138200782,\n\
\ \"acc_norm_stderr\": 0.010986307870045514\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.024060599423487424,\n\
\ \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.024060599423487424\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.24673202614379086,\n \"acc_stderr\": 0.017440820367402507,\n \
\ \"acc_norm\": 0.24673202614379086,\n \"acc_norm_stderr\": 0.017440820367402507\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n\
\ \"acc_stderr\": 0.04069306319721378,\n \"acc_norm\": 0.23636363636363636,\n\
\ \"acc_norm_stderr\": 0.04069306319721378\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.20816326530612245,\n \"acc_stderr\": 0.025991117672813292,\n\
\ \"acc_norm\": 0.20816326530612245,\n \"acc_norm_stderr\": 0.025991117672813292\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.22885572139303484,\n\
\ \"acc_stderr\": 0.029705284056772432,\n \"acc_norm\": 0.22885572139303484,\n\
\ \"acc_norm_stderr\": 0.029705284056772432\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036624,\n \
\ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036624\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.19879518072289157,\n\
\ \"acc_stderr\": 0.03106939026078942,\n \"acc_norm\": 0.19879518072289157,\n\
\ \"acc_norm_stderr\": 0.03106939026078942\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.23976608187134502,\n \"acc_stderr\": 0.03274485211946956,\n\
\ \"acc_norm\": 0.23976608187134502,\n \"acc_norm_stderr\": 0.03274485211946956\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.204406364749082,\n\
\ \"mc1_stderr\": 0.014117174337432618,\n \"mc2\": 0.3392533208873607,\n\
\ \"mc2_stderr\": 0.014078645743359227\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.584846093133386,\n \"acc_stderr\": 0.013848684086658585\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.003355704697986577,\n \
\ \"em_stderr\": 0.0005922452850005238,\n \"f1\": 0.0555180369127516,\n\
\ \"f1_stderr\": 0.0013765753121727882\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.00530705079605762,\n \"acc_stderr\": 0.0020013057209480375\n\
\ }\n}\n```"
repo_url: https://huggingface.co/vihangd/shearedplats-1.3b-v1
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|drop|3_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-27-03.574383.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-27-03.574383.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- '**/details_harness|winogrande|5_2023-11-18T21-27-03.574383.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T21-27-03.574383.parquet'
- config_name: results
data_files:
- split: 2023_11_18T21_27_03.574383
path:
- results_2023-11-18T21-27-03.574383.parquet
- split: latest
path:
- results_2023-11-18T21-27-03.574383.parquet
---
# Dataset Card for Evaluation run of vihangd/shearedplats-1.3b-v1
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/vihangd/shearedplats-1.3b-v1
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [vihangd/shearedplats-1.3b-v1](https://huggingface.co/vihangd/shearedplats-1.3b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_vihangd__shearedplats-1.3b-v1_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T21:27:03.574383](https://huggingface.co/datasets/open-llm-leaderboard/details_vihangd__shearedplats-1.3b-v1_public/blob/main/results_2023-11-18T21-27-03.574383.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.253847590681609,
"acc_stderr": 0.030523099331108815,
"acc_norm": 0.25573186704882195,
"acc_norm_stderr": 0.031292815233613276,
"mc1": 0.204406364749082,
"mc1_stderr": 0.014117174337432618,
"mc2": 0.3392533208873607,
"mc2_stderr": 0.014078645743359227,
"em": 0.003355704697986577,
"em_stderr": 0.0005922452850005238,
"f1": 0.0555180369127516,
"f1_stderr": 0.0013765753121727882
},
"harness|arc:challenge|25": {
"acc": 0.3174061433447099,
"acc_stderr": 0.013602239088038173,
"acc_norm": 0.35409556313993173,
"acc_norm_stderr": 0.013975454122756557
},
"harness|hellaswag|10": {
"acc": 0.4705238000398327,
"acc_stderr": 0.0049811031579404495,
"acc_norm": 0.6274646484763992,
"acc_norm_stderr": 0.004824917516374187
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768081,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768081
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03591444084196969,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03591444084196969
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.3223684210526316,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.3223684210526316,
"acc_norm_stderr": 0.03803510248351585
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.2528301886792453,
"acc_stderr": 0.026749899771241238,
"acc_norm": 0.2528301886792453,
"acc_norm_stderr": 0.026749899771241238
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.21,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.21,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.15,
"acc_stderr": 0.035887028128263714,
"acc_norm": 0.15,
"acc_norm_stderr": 0.035887028128263714
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909281,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909281
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.24855491329479767,
"acc_stderr": 0.03295304696818318,
"acc_norm": 0.24855491329479767,
"acc_norm_stderr": 0.03295304696818318
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2647058823529412,
"acc_stderr": 0.04389869956808778,
"acc_norm": 0.2647058823529412,
"acc_norm_stderr": 0.04389869956808778
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384739
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.3276595744680851,
"acc_stderr": 0.030683020843231004,
"acc_norm": 0.3276595744680851,
"acc_norm_stderr": 0.030683020843231004
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.21929824561403508,
"acc_stderr": 0.03892431106518754,
"acc_norm": 0.21929824561403508,
"acc_norm_stderr": 0.03892431106518754
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.23448275862068965,
"acc_stderr": 0.035306258743465914,
"acc_norm": 0.23448275862068965,
"acc_norm_stderr": 0.035306258743465914
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.022569897074918417,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.022569897074918417
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.15873015873015872,
"acc_stderr": 0.03268454013011744,
"acc_norm": 0.15873015873015872,
"acc_norm_stderr": 0.03268454013011744
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.27419354838709675,
"acc_stderr": 0.025378139970885196,
"acc_norm": 0.27419354838709675,
"acc_norm_stderr": 0.025378139970885196
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.270935960591133,
"acc_stderr": 0.031270907132976984,
"acc_norm": 0.270935960591133,
"acc_norm_stderr": 0.031270907132976984
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932269,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932269
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.034277431758165236,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.034277431758165236
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.21717171717171718,
"acc_stderr": 0.029376616484945633,
"acc_norm": 0.21717171717171718,
"acc_norm_stderr": 0.029376616484945633
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.029519282616817244,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.029519282616817244
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.24615384615384617,
"acc_stderr": 0.021840866990423084,
"acc_norm": 0.24615384615384617,
"acc_norm_stderr": 0.021840866990423084
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3037037037037037,
"acc_stderr": 0.028037929969114993,
"acc_norm": 0.3037037037037037,
"acc_norm_stderr": 0.028037929969114993
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868966,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868966
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2251655629139073,
"acc_stderr": 0.03410435282008936,
"acc_norm": 0.2251655629139073,
"acc_norm_stderr": 0.03410435282008936
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.23669724770642203,
"acc_stderr": 0.01822407811729907,
"acc_norm": 0.23669724770642203,
"acc_norm_stderr": 0.01822407811729907
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.03293377139415191,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.03293377139415191
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.2107843137254902,
"acc_stderr": 0.028626547912437388,
"acc_norm": 0.2107843137254902,
"acc_norm_stderr": 0.028626547912437388
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.27848101265822783,
"acc_stderr": 0.029178682304842548,
"acc_norm": 0.27848101265822783,
"acc_norm_stderr": 0.029178682304842548
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.336322869955157,
"acc_stderr": 0.031708824268455,
"acc_norm": 0.336322869955157,
"acc_norm_stderr": 0.031708824268455
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.22900763358778625,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.22900763358778625,
"acc_norm_stderr": 0.036853466317118506
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.38016528925619836,
"acc_stderr": 0.04431324501968432,
"acc_norm": 0.38016528925619836,
"acc_norm_stderr": 0.04431324501968432
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.23148148148148148,
"acc_stderr": 0.04077494709252628,
"acc_norm": 0.23148148148148148,
"acc_norm_stderr": 0.04077494709252628
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.22699386503067484,
"acc_stderr": 0.032910995786157686,
"acc_norm": 0.22699386503067484,
"acc_norm_stderr": 0.032910995786157686
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.23214285714285715,
"acc_stderr": 0.04007341809755805,
"acc_norm": 0.23214285714285715,
"acc_norm_stderr": 0.04007341809755805
},
"harness|hendrycksTest-management|5": {
"acc": 0.23300970873786409,
"acc_stderr": 0.041858325989283164,
"acc_norm": 0.23300970873786409,
"acc_norm_stderr": 0.041858325989283164
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.23076923076923078,
"acc_stderr": 0.027601921381417586,
"acc_norm": 0.23076923076923078,
"acc_norm_stderr": 0.027601921381417586
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.22,
"acc_stderr": 0.0416333199893227,
"acc_norm": 0.22,
"acc_norm_stderr": 0.0416333199893227
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.26947637292464877,
"acc_stderr": 0.01586624307321505,
"acc_norm": 0.26947637292464877,
"acc_norm_stderr": 0.01586624307321505
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.21676300578034682,
"acc_stderr": 0.02218347766841286,
"acc_norm": 0.21676300578034682,
"acc_norm_stderr": 0.02218347766841286
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.25251396648044694,
"acc_stderr": 0.014530330201468641,
"acc_norm": 0.25251396648044694,
"acc_norm_stderr": 0.014530330201468641
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.22549019607843138,
"acc_stderr": 0.0239291555173513,
"acc_norm": 0.22549019607843138,
"acc_norm_stderr": 0.0239291555173513
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.24437299035369775,
"acc_stderr": 0.024406162094668893,
"acc_norm": 0.24437299035369775,
"acc_norm_stderr": 0.024406162094668893
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.02540719779889016,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.02540719779889016
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2730496453900709,
"acc_stderr": 0.02657786094330785,
"acc_norm": 0.2730496453900709,
"acc_norm_stderr": 0.02657786094330785
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.24511082138200782,
"acc_stderr": 0.010986307870045514,
"acc_norm": 0.24511082138200782,
"acc_norm_stderr": 0.010986307870045514
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.1948529411764706,
"acc_stderr": 0.024060599423487424,
"acc_norm": 0.1948529411764706,
"acc_norm_stderr": 0.024060599423487424
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.24673202614379086,
"acc_stderr": 0.017440820367402507,
"acc_norm": 0.24673202614379086,
"acc_norm_stderr": 0.017440820367402507
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.23636363636363636,
"acc_stderr": 0.04069306319721378,
"acc_norm": 0.23636363636363636,
"acc_norm_stderr": 0.04069306319721378
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.20816326530612245,
"acc_stderr": 0.025991117672813292,
"acc_norm": 0.20816326530612245,
"acc_norm_stderr": 0.025991117672813292
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.22885572139303484,
"acc_stderr": 0.029705284056772432,
"acc_norm": 0.22885572139303484,
"acc_norm_stderr": 0.029705284056772432
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.19,
"acc_stderr": 0.03942772444036624,
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036624
},
"harness|hendrycksTest-virology|5": {
"acc": 0.19879518072289157,
"acc_stderr": 0.03106939026078942,
"acc_norm": 0.19879518072289157,
"acc_norm_stderr": 0.03106939026078942
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.23976608187134502,
"acc_stderr": 0.03274485211946956,
"acc_norm": 0.23976608187134502,
"acc_norm_stderr": 0.03274485211946956
},
"harness|truthfulqa:mc|0": {
"mc1": 0.204406364749082,
"mc1_stderr": 0.014117174337432618,
"mc2": 0.3392533208873607,
"mc2_stderr": 0.014078645743359227
},
"harness|winogrande|5": {
"acc": 0.584846093133386,
"acc_stderr": 0.013848684086658585
},
"harness|drop|3": {
"em": 0.003355704697986577,
"em_stderr": 0.0005922452850005238,
"f1": 0.0555180369127516,
"f1_stderr": 0.0013765753121727882
},
"harness|gsm8k|5": {
"acc": 0.00530705079605762,
"acc_stderr": 0.0020013057209480375
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7146917581558228,
-0.8212088346481323,
0.2448308765888214,
0.2891528606414795,
-0.18753275275230408,
-0.04671301320195198,
0.03660169243812561,
-0.20550383627414703,
0.5896838903427124,
-0.020313696935772896,
-0.5317001938819885,
-0.700072169303894,
-0.43185940384864807,
0.2137086838483... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Riki12/RIKIFAN | Riki12 | 2023-11-18T22:23:08Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-18T22:23:08Z | 2023-11-18T21:34:26.000Z | 2023-11-18T21:34:26 | ---
license: openrail
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
sadaty515/jasdjkakjsd | sadaty515 | 2023-11-18T21:38:35Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:38:35Z | 2023-11-18T21:38:35.000Z | 2023-11-18T21:38:35 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_ehartford__dolphin-2.2.1-mistral-7b_public | open-llm-leaderboard | 2023-11-18T21:57:38Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:57:38Z | 2023-11-18T21:56:51.000Z | 2023-11-18T21:56:51 | ---
pretty_name: Evaluation run of ehartford/dolphin-2.2.1-mistral-7b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ehartford/dolphin-2.2.1-mistral-7b](https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ehartford__dolphin-2.2.1-mistral-7b_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T21:53:53.398955](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-2.2.1-mistral-7b_public/blob/main/results_2023-11-18T21-53-53.398955.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6280342292326917,\n\
\ \"acc_stderr\": 0.03221866029257592,\n \"acc_norm\": 0.6362757546015607,\n\
\ \"acc_norm_stderr\": 0.03290191778317961,\n \"mc1\": 0.3635250917992656,\n\
\ \"mc1_stderr\": 0.016838862883965824,\n \"mc2\": 0.5317431297461704,\n\
\ \"mc2_stderr\": 0.015046498387949936,\n \"em\": 0.004299496644295302,\n\
\ \"em_stderr\": 0.0006700586558630193,\n \"f1\": 0.081867659395973,\n\
\ \"f1_stderr\": 0.0016972961971096978\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.60580204778157,\n \"acc_stderr\": 0.014280522667467323,\n\
\ \"acc_norm\": 0.6348122866894198,\n \"acc_norm_stderr\": 0.014070265519268802\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.642899820752838,\n\
\ \"acc_stderr\": 0.004781654610857137,\n \"acc_norm\": 0.8385779725154352,\n\
\ \"acc_norm_stderr\": 0.0036716784499612135\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n\
\ \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n\
\ \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926604,\n\
\ \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926604\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n\
\ \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \
\ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880263,\n\
\ \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880263\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n\
\ \"acc_stderr\": 0.035868792800803406,\n \"acc_norm\": 0.7569444444444444,\n\
\ \"acc_norm_stderr\": 0.035868792800803406\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n\
\ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\
\ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\
\ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\
\ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n\
\ \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.032579014820998356,\n\
\ \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.032579014820998356\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\
\ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\
\ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n\
\ \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3835978835978836,\n \"acc_stderr\": 0.025043757318520193,\n \"\
acc_norm\": 0.3835978835978836,\n \"acc_norm_stderr\": 0.025043757318520193\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n\
\ \"acc_stderr\": 0.04390259265377561,\n \"acc_norm\": 0.40476190476190477,\n\
\ \"acc_norm_stderr\": 0.04390259265377561\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7709677419354839,\n \"acc_stderr\": 0.023904914311782655,\n \"\
acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.023904914311782655\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.4827586206896552,\n \"acc_stderr\": 0.035158955511656986,\n \"\
acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.035158955511656986\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\
: 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n\
\ \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586808,\n \"\
acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586808\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593552,\n\
\ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593552\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6538461538461539,\n \"acc_stderr\": 0.02412112541694119,\n \
\ \"acc_norm\": 0.6538461538461539,\n \"acc_norm_stderr\": 0.02412112541694119\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3296296296296296,\n \"acc_stderr\": 0.028661201116524575,\n \
\ \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.028661201116524575\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886786,\n\
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886786\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"\
acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"\
acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n \"\
acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7696078431372549,\n \"acc_stderr\": 0.02955429260569508,\n \"\
acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.02955429260569508\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \
\ \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n\
\ \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"\
acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n\
\ \"acc_stderr\": 0.03826076324884866,\n \"acc_norm\": 0.8055555555555556,\n\
\ \"acc_norm_stderr\": 0.03826076324884866\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n\
\ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n\
\ \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n\
\ \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\
\ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n\
\ \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n\
\ \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8135376756066411,\n\
\ \"acc_stderr\": 0.013927751372001505,\n \"acc_norm\": 0.8135376756066411,\n\
\ \"acc_norm_stderr\": 0.013927751372001505\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.02418242749657762,\n\
\ \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.02418242749657762\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3843575418994413,\n\
\ \"acc_stderr\": 0.0162690886639594,\n \"acc_norm\": 0.3843575418994413,\n\
\ \"acc_norm_stderr\": 0.0162690886639594\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n\
\ \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n\
\ \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n\
\ \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7006172839506173,\n \"acc_stderr\": 0.025483115601195448,\n\
\ \"acc_norm\": 0.7006172839506173,\n \"acc_norm_stderr\": 0.025483115601195448\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.475177304964539,\n \"acc_stderr\": 0.029790719243829727,\n \
\ \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.029790719243829727\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4452411994784876,\n\
\ \"acc_stderr\": 0.012693421303973294,\n \"acc_norm\": 0.4452411994784876,\n\
\ \"acc_norm_stderr\": 0.012693421303973294\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6433823529411765,\n \"acc_stderr\": 0.02909720956841195,\n\
\ \"acc_norm\": 0.6433823529411765,\n \"acc_norm_stderr\": 0.02909720956841195\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \
\ \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291296,\n\
\ \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291296\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n\
\ \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n\
\ \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \
\ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n\
\ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n\
\ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n\
\ \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3635250917992656,\n\
\ \"mc1_stderr\": 0.016838862883965824,\n \"mc2\": 0.5317431297461704,\n\
\ \"mc2_stderr\": 0.015046498387949936\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7837411207576953,\n \"acc_stderr\": 0.011570614861409352\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.004299496644295302,\n \
\ \"em_stderr\": 0.0006700586558630193,\n \"f1\": 0.081867659395973,\n\
\ \"f1_stderr\": 0.0016972961971096978\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.21076573161485973,\n \"acc_stderr\": 0.011234280469030465\n\
\ }\n}\n```"
repo_url: https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|drop|3_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-53-53.398955.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-53-53.398955.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- '**/details_harness|winogrande|5_2023-11-18T21-53-53.398955.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T21-53-53.398955.parquet'
- config_name: results
data_files:
- split: 2023_11_18T21_53_53.398955
path:
- results_2023-11-18T21-53-53.398955.parquet
- split: latest
path:
- results_2023-11-18T21-53-53.398955.parquet
---
# Dataset Card for Evaluation run of ehartford/dolphin-2.2.1-mistral-7b
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [ehartford/dolphin-2.2.1-mistral-7b](https://huggingface.co/ehartford/dolphin-2.2.1-mistral-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ehartford__dolphin-2.2.1-mistral-7b_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T21:53:53.398955](https://huggingface.co/datasets/open-llm-leaderboard/details_ehartford__dolphin-2.2.1-mistral-7b_public/blob/main/results_2023-11-18T21-53-53.398955.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6280342292326917,
"acc_stderr": 0.03221866029257592,
"acc_norm": 0.6362757546015607,
"acc_norm_stderr": 0.03290191778317961,
"mc1": 0.3635250917992656,
"mc1_stderr": 0.016838862883965824,
"mc2": 0.5317431297461704,
"mc2_stderr": 0.015046498387949936,
"em": 0.004299496644295302,
"em_stderr": 0.0006700586558630193,
"f1": 0.081867659395973,
"f1_stderr": 0.0016972961971096978
},
"harness|arc:challenge|25": {
"acc": 0.60580204778157,
"acc_stderr": 0.014280522667467323,
"acc_norm": 0.6348122866894198,
"acc_norm_stderr": 0.014070265519268802
},
"harness|hellaswag|10": {
"acc": 0.642899820752838,
"acc_stderr": 0.004781654610857137,
"acc_norm": 0.8385779725154352,
"acc_norm_stderr": 0.0036716784499612135
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6222222222222222,
"acc_stderr": 0.04188307537595852,
"acc_norm": 0.6222222222222222,
"acc_norm_stderr": 0.04188307537595852
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6710526315789473,
"acc_stderr": 0.03823428969926604,
"acc_norm": 0.6710526315789473,
"acc_norm_stderr": 0.03823428969926604
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6792452830188679,
"acc_stderr": 0.028727502957880263,
"acc_norm": 0.6792452830188679,
"acc_norm_stderr": 0.028727502957880263
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.035868792800803406,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.035868792800803406
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3627450980392157,
"acc_stderr": 0.04784060704105654,
"acc_norm": 0.3627450980392157,
"acc_norm_stderr": 0.04784060704105654
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.04093601807403326,
"acc_norm": 0.79,
"acc_norm_stderr": 0.04093601807403326
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5404255319148936,
"acc_stderr": 0.032579014820998356,
"acc_norm": 0.5404255319148936,
"acc_norm_stderr": 0.032579014820998356
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.04685473041907789,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.04685473041907789
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5862068965517241,
"acc_stderr": 0.04104269211806232,
"acc_norm": 0.5862068965517241,
"acc_norm_stderr": 0.04104269211806232
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3835978835978836,
"acc_stderr": 0.025043757318520193,
"acc_norm": 0.3835978835978836,
"acc_norm_stderr": 0.025043757318520193
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.04390259265377561,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.04390259265377561
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7709677419354839,
"acc_stderr": 0.023904914311782655,
"acc_norm": 0.7709677419354839,
"acc_norm_stderr": 0.023904914311782655
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4827586206896552,
"acc_stderr": 0.035158955511656986,
"acc_norm": 0.4827586206896552,
"acc_norm_stderr": 0.035158955511656986
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.029126522834586808,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.029126522834586808
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8756476683937824,
"acc_stderr": 0.023814477086593552,
"acc_norm": 0.8756476683937824,
"acc_norm_stderr": 0.023814477086593552
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6538461538461539,
"acc_stderr": 0.02412112541694119,
"acc_norm": 0.6538461538461539,
"acc_norm_stderr": 0.02412112541694119
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3296296296296296,
"acc_stderr": 0.028661201116524575,
"acc_norm": 0.3296296296296296,
"acc_norm_stderr": 0.028661201116524575
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.030388353551886786,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.030388353551886786
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526732,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526732
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.01584825580650155,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.01584825580650155
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.49074074074074076,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.49074074074074076,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7696078431372549,
"acc_stderr": 0.02955429260569508,
"acc_norm": 0.7696078431372549,
"acc_norm_stderr": 0.02955429260569508
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7805907172995781,
"acc_stderr": 0.026939106581553945,
"acc_norm": 0.7805907172995781,
"acc_norm_stderr": 0.026939106581553945
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7862595419847328,
"acc_stderr": 0.0359546161177469,
"acc_norm": 0.7862595419847328,
"acc_norm_stderr": 0.0359546161177469
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8016528925619835,
"acc_stderr": 0.036401182719909456,
"acc_norm": 0.8016528925619835,
"acc_norm_stderr": 0.036401182719909456
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.03826076324884866,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.03826076324884866
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7607361963190185,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.7607361963190185,
"acc_norm_stderr": 0.033519538795212696
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4732142857142857,
"acc_stderr": 0.047389751192741546,
"acc_norm": 0.4732142857142857,
"acc_norm_stderr": 0.047389751192741546
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8717948717948718,
"acc_stderr": 0.021901905115073325,
"acc_norm": 0.8717948717948718,
"acc_norm_stderr": 0.021901905115073325
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8135376756066411,
"acc_stderr": 0.013927751372001505,
"acc_norm": 0.8135376756066411,
"acc_norm_stderr": 0.013927751372001505
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7196531791907514,
"acc_stderr": 0.02418242749657762,
"acc_norm": 0.7196531791907514,
"acc_norm_stderr": 0.02418242749657762
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3843575418994413,
"acc_stderr": 0.0162690886639594,
"acc_norm": 0.3843575418994413,
"acc_norm_stderr": 0.0162690886639594
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7091503267973857,
"acc_stderr": 0.02600480036395213,
"acc_norm": 0.7091503267973857,
"acc_norm_stderr": 0.02600480036395213
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.02558306248998481,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.02558306248998481
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7006172839506173,
"acc_stderr": 0.025483115601195448,
"acc_norm": 0.7006172839506173,
"acc_norm_stderr": 0.025483115601195448
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.475177304964539,
"acc_stderr": 0.029790719243829727,
"acc_norm": 0.475177304964539,
"acc_norm_stderr": 0.029790719243829727
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4452411994784876,
"acc_stderr": 0.012693421303973294,
"acc_norm": 0.4452411994784876,
"acc_norm_stderr": 0.012693421303973294
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6433823529411765,
"acc_stderr": 0.02909720956841195,
"acc_norm": 0.6433823529411765,
"acc_norm_stderr": 0.02909720956841195
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6535947712418301,
"acc_stderr": 0.01924978569171721,
"acc_norm": 0.6535947712418301,
"acc_norm_stderr": 0.01924978569171721
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7183673469387755,
"acc_stderr": 0.028795185574291296,
"acc_norm": 0.7183673469387755,
"acc_norm_stderr": 0.028795185574291296
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8407960199004975,
"acc_stderr": 0.025870646766169136,
"acc_norm": 0.8407960199004975,
"acc_norm_stderr": 0.025870646766169136
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.87,
"acc_stderr": 0.03379976689896309,
"acc_norm": 0.87,
"acc_norm_stderr": 0.03379976689896309
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5421686746987951,
"acc_stderr": 0.0387862677100236,
"acc_norm": 0.5421686746987951,
"acc_norm_stderr": 0.0387862677100236
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8304093567251462,
"acc_stderr": 0.02878210810540171,
"acc_norm": 0.8304093567251462,
"acc_norm_stderr": 0.02878210810540171
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3635250917992656,
"mc1_stderr": 0.016838862883965824,
"mc2": 0.5317431297461704,
"mc2_stderr": 0.015046498387949936
},
"harness|winogrande|5": {
"acc": 0.7837411207576953,
"acc_stderr": 0.011570614861409352
},
"harness|drop|3": {
"em": 0.004299496644295302,
"em_stderr": 0.0006700586558630193,
"f1": 0.081867659395973,
"f1_stderr": 0.0016972961971096978
},
"harness|gsm8k|5": {
"acc": 0.21076573161485973,
"acc_stderr": 0.011234280469030465
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7470908761024475,
-0.8400171995162964,
0.2884361147880554,
0.188236266374588,
-0.19393503665924072,
-0.09931913763284683,
0.04565844684839249,
-0.256987065076828,
0.5640522241592407,
-0.012308500707149506,
-0.48672789335250854,
-0.6713775396347046,
-0.43287089467048645,
0.24744631350040... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_migtissera__Tess-XS-v1.0_public | open-llm-leaderboard | 2023-11-18T21:59:14Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T21:59:14Z | 2023-11-18T21:58:26.000Z | 2023-11-18T21:58:26 | ---
pretty_name: Evaluation run of migtissera/Tess-XS-v1.0
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [migtissera/Tess-XS-v1.0](https://huggingface.co/migtissera/Tess-XS-v1.0) on the\
\ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_migtissera__Tess-XS-v1.0_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T21:55:23.260774](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Tess-XS-v1.0_public/blob/main/results_2023-11-18T21-55-23.260774.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6348258267763893,\n\
\ \"acc_stderr\": 0.03230372610827704,\n \"acc_norm\": 0.6439271893072561,\n\
\ \"acc_norm_stderr\": 0.03300134321723649,\n \"mc1\": 0.3157894736842105,\n\
\ \"mc1_stderr\": 0.016272287957916916,\n \"mc2\": 0.4712323822712203,\n\
\ \"mc2_stderr\": 0.014554223298121486,\n \"em\": 0.0018875838926174498,\n\
\ \"em_stderr\": 0.0004445109990558992,\n \"f1\": 0.061799496644295286,\n\
\ \"f1_stderr\": 0.0013795660027086077\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5750853242320819,\n \"acc_stderr\": 0.014445698968520769,\n\
\ \"acc_norm\": 0.6143344709897611,\n \"acc_norm_stderr\": 0.014224250973257182\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6381198964349731,\n\
\ \"acc_stderr\": 0.00479562275732714,\n \"acc_norm\": 0.8381796454889464,\n\
\ \"acc_norm_stderr\": 0.003675332590681066\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n\
\ \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n\
\ \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.03842498559395268,\n\
\ \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.03842498559395268\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\
\ \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \
\ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.028254200344438662,\n\
\ \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.028254200344438662\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n\
\ \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.7013888888888888,\n\
\ \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\
: 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\
\ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n\
\ \"acc_stderr\": 0.03669072477416906,\n \"acc_norm\": 0.6358381502890174,\n\
\ \"acc_norm_stderr\": 0.03669072477416906\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n\
\ \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n\
\ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n\
\ \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n\
\ \"acc_stderr\": 0.04697085136647863,\n \"acc_norm\": 0.47368421052631576,\n\
\ \"acc_norm_stderr\": 0.04697085136647863\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3994708994708995,\n \"acc_stderr\": 0.02522545028406788,\n \"\
acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.02522545028406788\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\
\ \"acc_stderr\": 0.043758884927270605,\n \"acc_norm\": 0.3968253968253968,\n\
\ \"acc_norm_stderr\": 0.043758884927270605\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \
\ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"\
acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"\
acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\
: 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.03374402644139403,\n\
\ \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.03374402644139403\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386414,\n \"\
acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386414\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n\
\ \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635477,\n\
\ \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635477\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616258,\n \
\ \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616258\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n\
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"\
acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8275229357798165,\n \"acc_stderr\": 0.01619780795684805,\n \"\
acc_norm\": 0.8275229357798165,\n \"acc_norm_stderr\": 0.01619780795684805\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"\
acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"\
acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069425,\n \
\ \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069425\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n\
\ \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n\
\ \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n\
\ \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.8098159509202454,\n \"acc_stderr\": 0.030833491146281235,\n\
\ \"acc_norm\": 0.8098159509202454,\n \"acc_norm_stderr\": 0.030833491146281235\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n\
\ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n\
\ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822585,\n\
\ \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822585\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n\
\ \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n\
\ \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \
\ \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8173690932311622,\n\
\ \"acc_stderr\": 0.013816335389973136,\n \"acc_norm\": 0.8173690932311622,\n\
\ \"acc_norm_stderr\": 0.013816335389973136\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.024257901705323374,\n\
\ \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.024257901705323374\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.33519553072625696,\n\
\ \"acc_stderr\": 0.015788007190185884,\n \"acc_norm\": 0.33519553072625696,\n\
\ \"acc_norm_stderr\": 0.015788007190185884\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n\
\ \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n\
\ \"acc_stderr\": 0.026160584450140453,\n \"acc_norm\": 0.6945337620578779,\n\
\ \"acc_norm_stderr\": 0.026160584450140453\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.02465968518596728,\n\
\ \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.02465968518596728\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.5212765957446809,\n \"acc_stderr\": 0.029800481645628693,\n \
\ \"acc_norm\": 0.5212765957446809,\n \"acc_norm_stderr\": 0.029800481645628693\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4485006518904824,\n\
\ \"acc_stderr\": 0.012702317490559807,\n \"acc_norm\": 0.4485006518904824,\n\
\ \"acc_norm_stderr\": 0.012702317490559807\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031215,\n\
\ \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031215\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \
\ \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\
\ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\
\ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.02783302387139968,\n\
\ \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.02783302387139968\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\
\ \"acc_stderr\": 0.02553843336857833,\n \"acc_norm\": 0.845771144278607,\n\
\ \"acc_norm_stderr\": 0.02553843336857833\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \
\ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n\
\ \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n\
\ \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.02917088550072767,\n\
\ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.02917088550072767\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3157894736842105,\n\
\ \"mc1_stderr\": 0.016272287957916916,\n \"mc2\": 0.4712323822712203,\n\
\ \"mc2_stderr\": 0.014554223298121486\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7892659826361483,\n \"acc_stderr\": 0.011462046419710676\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.0018875838926174498,\n \
\ \"em_stderr\": 0.0004445109990558992,\n \"f1\": 0.061799496644295286,\n\
\ \"f1_stderr\": 0.0013795660027086077\n },\n \"harness|gsm8k|5\":\
\ {\n \"acc\": 0.18271417740712662,\n \"acc_stderr\": 0.010644258206326236\n\
\ }\n}\n```"
repo_url: https://huggingface.co/migtissera/Tess-XS-v1.0
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|drop|3_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-55-23.260774.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T21-55-23.260774.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- '**/details_harness|winogrande|5_2023-11-18T21-55-23.260774.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T21-55-23.260774.parquet'
- config_name: results
data_files:
- split: 2023_11_18T21_55_23.260774
path:
- results_2023-11-18T21-55-23.260774.parquet
- split: latest
path:
- results_2023-11-18T21-55-23.260774.parquet
---
# Dataset Card for Evaluation run of migtissera/Tess-XS-v1.0
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/migtissera/Tess-XS-v1.0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [migtissera/Tess-XS-v1.0](https://huggingface.co/migtissera/Tess-XS-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_migtissera__Tess-XS-v1.0_public",
"harness_winogrande_5",
	split="latest")
```
## Latest results
These are the [latest results from run 2023-11-18T21:55:23.260774](https://huggingface.co/datasets/open-llm-leaderboard/details_migtissera__Tess-XS-v1.0_public/blob/main/results_2023-11-18T21-55-23.260774.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the timestamped split, and the "latest" split, of each eval's configuration):
```python
{
"all": {
"acc": 0.6348258267763893,
"acc_stderr": 0.03230372610827704,
"acc_norm": 0.6439271893072561,
"acc_norm_stderr": 0.03300134321723649,
"mc1": 0.3157894736842105,
"mc1_stderr": 0.016272287957916916,
"mc2": 0.4712323822712203,
"mc2_stderr": 0.014554223298121486,
"em": 0.0018875838926174498,
"em_stderr": 0.0004445109990558992,
"f1": 0.061799496644295286,
"f1_stderr": 0.0013795660027086077
},
"harness|arc:challenge|25": {
"acc": 0.5750853242320819,
"acc_stderr": 0.014445698968520769,
"acc_norm": 0.6143344709897611,
"acc_norm_stderr": 0.014224250973257182
},
"harness|hellaswag|10": {
"acc": 0.6381198964349731,
"acc_stderr": 0.00479562275732714,
"acc_norm": 0.8381796454889464,
"acc_norm_stderr": 0.003675332590681066
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.047609522856952365,
"acc_norm": 0.34,
"acc_norm_stderr": 0.047609522856952365
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6370370370370371,
"acc_stderr": 0.04153948404742398,
"acc_norm": 0.6370370370370371,
"acc_norm_stderr": 0.04153948404742398
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6644736842105263,
"acc_stderr": 0.03842498559395268,
"acc_norm": 0.6644736842105263,
"acc_norm_stderr": 0.03842498559395268
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6981132075471698,
"acc_stderr": 0.028254200344438662,
"acc_norm": 0.6981132075471698,
"acc_norm_stderr": 0.028254200344438662
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7013888888888888,
"acc_stderr": 0.03827052357950756,
"acc_norm": 0.7013888888888888,
"acc_norm_stderr": 0.03827052357950756
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6358381502890174,
"acc_stderr": 0.03669072477416906,
"acc_norm": 0.6358381502890174,
"acc_norm_stderr": 0.03669072477416906
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.04913595201274498,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.04913595201274498
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5829787234042553,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.5829787234042553,
"acc_norm_stderr": 0.03223276266711712
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.04697085136647863,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.04697085136647863
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3994708994708995,
"acc_stderr": 0.02522545028406788,
"acc_norm": 0.3994708994708995,
"acc_norm_stderr": 0.02522545028406788
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.043758884927270605,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.043758884927270605
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7515151515151515,
"acc_stderr": 0.03374402644139403,
"acc_norm": 0.7515151515151515,
"acc_norm_stderr": 0.03374402644139403
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7727272727272727,
"acc_stderr": 0.029857515673386414,
"acc_norm": 0.7727272727272727,
"acc_norm_stderr": 0.029857515673386414
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919443,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919443
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6564102564102564,
"acc_stderr": 0.024078696580635477,
"acc_norm": 0.6564102564102564,
"acc_norm_stderr": 0.024078696580635477
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34814814814814815,
"acc_stderr": 0.029045600290616258,
"acc_norm": 0.34814814814814815,
"acc_norm_stderr": 0.029045600290616258
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.030388353551886793,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.030388353551886793
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33112582781456956,
"acc_stderr": 0.038425817186598696,
"acc_norm": 0.33112582781456956,
"acc_norm_stderr": 0.038425817186598696
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8275229357798165,
"acc_stderr": 0.01619780795684805,
"acc_norm": 0.8275229357798165,
"acc_norm_stderr": 0.01619780795684805
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5324074074074074,
"acc_stderr": 0.03402801581358966,
"acc_norm": 0.5324074074074074,
"acc_norm_stderr": 0.03402801581358966
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8137254901960784,
"acc_stderr": 0.027325470966716312,
"acc_norm": 0.8137254901960784,
"acc_norm_stderr": 0.027325470966716312
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7721518987341772,
"acc_stderr": 0.027303484599069425,
"acc_norm": 0.7721518987341772,
"acc_norm_stderr": 0.027303484599069425
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7862595419847328,
"acc_stderr": 0.0359546161177469,
"acc_norm": 0.7862595419847328,
"acc_norm_stderr": 0.0359546161177469
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.040191074725573483,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.040191074725573483
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8098159509202454,
"acc_stderr": 0.030833491146281235,
"acc_norm": 0.8098159509202454,
"acc_norm_stderr": 0.030833491146281235
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5178571428571429,
"acc_stderr": 0.047427623612430116,
"acc_norm": 0.5178571428571429,
"acc_norm_stderr": 0.047427623612430116
},
"harness|hendrycksTest-management|5": {
"acc": 0.8058252427184466,
"acc_stderr": 0.03916667762822585,
"acc_norm": 0.8058252427184466,
"acc_norm_stderr": 0.03916667762822585
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8547008547008547,
"acc_stderr": 0.023086635086841407,
"acc_norm": 0.8547008547008547,
"acc_norm_stderr": 0.023086635086841407
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384739
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8173690932311622,
"acc_stderr": 0.013816335389973136,
"acc_norm": 0.8173690932311622,
"acc_norm_stderr": 0.013816335389973136
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7167630057803468,
"acc_stderr": 0.024257901705323374,
"acc_norm": 0.7167630057803468,
"acc_norm_stderr": 0.024257901705323374
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.33519553072625696,
"acc_stderr": 0.015788007190185884,
"acc_norm": 0.33519553072625696,
"acc_norm_stderr": 0.015788007190185884
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292456,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292456
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6945337620578779,
"acc_stderr": 0.026160584450140453,
"acc_norm": 0.6945337620578779,
"acc_norm_stderr": 0.026160584450140453
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.02465968518596728,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.02465968518596728
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5212765957446809,
"acc_stderr": 0.029800481645628693,
"acc_norm": 0.5212765957446809,
"acc_norm_stderr": 0.029800481645628693
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4485006518904824,
"acc_stderr": 0.012702317490559807,
"acc_norm": 0.4485006518904824,
"acc_norm_stderr": 0.012702317490559807
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6801470588235294,
"acc_stderr": 0.028332959514031215,
"acc_norm": 0.6801470588235294,
"acc_norm_stderr": 0.028332959514031215
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6781045751633987,
"acc_stderr": 0.018901015322093092,
"acc_norm": 0.6781045751633987,
"acc_norm_stderr": 0.018901015322093092
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6545454545454545,
"acc_stderr": 0.04554619617541054,
"acc_norm": 0.6545454545454545,
"acc_norm_stderr": 0.04554619617541054
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.746938775510204,
"acc_stderr": 0.02783302387139968,
"acc_norm": 0.746938775510204,
"acc_norm_stderr": 0.02783302387139968
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.02553843336857833,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.02553843336857833
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.02917088550072767,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.02917088550072767
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3157894736842105,
"mc1_stderr": 0.016272287957916916,
"mc2": 0.4712323822712203,
"mc2_stderr": 0.014554223298121486
},
"harness|winogrande|5": {
"acc": 0.7892659826361483,
"acc_stderr": 0.011462046419710676
},
"harness|drop|3": {
"em": 0.0018875838926174498,
"em_stderr": 0.0004445109990558992,
"f1": 0.061799496644295286,
"f1_stderr": 0.0013795660027086077
},
"harness|gsm8k|5": {
"acc": 0.18271417740712662,
"acc_stderr": 0.010644258206326236
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7204800248146057,
-0.7624870538711548,
0.30389171838760376,
0.21560965478420258,
-0.20500415563583374,
-0.04143986105918884,
0.011310381814837456,
-0.2040226012468338,
0.5891261696815491,
-0.07670851051807404,
-0.5077674984931946,
-0.7335145473480225,
-0.506820797920227,
0.2324278950691... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
antonshenk/grid_with_dates.json | antonshenk | 2023-11-18T22:41:15Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T22:41:15Z | 2023-11-18T22:07:15.000Z | 2023-11-18T22:07:15 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Celine.Product.prices.Germany | DBQ | 2023-11-18T22:12:38Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:12:38Z | 2023-11-18T22:12:33.000Z | 2023-11-18T22:12:33 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: Germany - Celine - Product-level price list
tags:
- webscraping
- ecommerce
- Celine
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: website_name
dtype: string
- name: competence_date
dtype: string
- name: country_code
dtype: string
- name: currency_code
dtype: string
- name: brand
dtype: string
- name: category1_code
dtype: string
- name: category2_code
dtype: string
- name: category3_code
dtype: string
- name: product_code
dtype: string
- name: title
dtype: string
- name: itemurl
dtype: string
- name: imageurl
dtype: string
- name: full_price
dtype: float64
- name: price
dtype: float64
- name: full_price_eur
dtype: float64
- name: price_eur
dtype: float64
- name: flg_discount
dtype: int64
splits:
- name: train
num_bytes: 319426
num_examples: 655
download_size: 78524
dataset_size: 319426
---
# Celine web scraped data
## About the website
Celine operates within the **fashion industry** in the EMEA region, particularly in **Germany**. This industry is currently undergoing digital transformation with a focus on **Ecommerce**, creating a competitive space for established and emerging fashion brands. In Germany, the local customer using various online stores has shown significant growth, displaying an increased interest in online fashion shopping. This pattern presents the fashion brands with a unique set of challenges and opportunities. The dataset observed includes **Ecommerce product-list page (PLP) data** on **Celine** in Germany, offering priceless insights into consumer behavior and preferences, allowing the brand to reach their target market more effectively.
## Link to **dataset**
[Germany - Celine - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Celine%20Product-prices%20Germany/r/rec14W2uH4yIsDx2F)
| [
-0.3664393126964569,
-0.505483090877533,
0.22408372163772583,
0.39052483439445496,
-0.33718788623809814,
0.16706272959709167,
0.2738020420074463,
-0.6799159646034241,
0.5341001749038696,
0.627851128578186,
-1.119215965270996,
-0.8486760854721069,
0.03585885465145111,
-0.006885759066790342,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Net.a.Porter.Product.prices.Portugal | DBQ | 2023-11-18T22:12:54Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:12:54Z | 2023-11-18T22:12:46.000Z | 2023-11-18T22:12:46 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: Portugal - Net-a-Porter - Product-level price list
tags:
- webscraping
- ecommerce
- Net
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Net-a-Porter
dtype: string
- name: '2023-11-08'
dtype: string
- name: PRT
dtype: string
- name: EUR
dtype: string
- name: SAINT LAURENT
dtype: string
- name: BAGS
dtype: string
- name: SHOULDER BAGS
dtype: string
- name: CROSS BODY
dtype: string
- name: '33258524072235985'
dtype: int64
- name: Loulou Toy quilted leather shoulder bag
dtype: string
- name: https://www.net-a-porter.com/pt/en/shop/product/saint-laurent/bags/cross-body/loulou-toy-quilted-leather-shoulder-bag/33258524072235985
dtype: string
- name: https://www.net-a-porter.com/variants/images/33258524072235985/ou/w1000.jpg
dtype: string
- name: '1490.00'
dtype: float64
- name: 1490.00.1
dtype: float64
- name: 1490.00.2
dtype: float64
- name: 1490.00.3
dtype: float64
- name: '0'
dtype: int64
splits:
- name: train
num_bytes: 18057389
num_examples: 44280
download_size: 5682167
dataset_size: 18057389
---
# Net-a-Porter web scraped data
## About the website
Net-a-Porter operates within the thriving **Ecommerce industry** across the EMEA region, with a strong foothold in the market of **Portugal** particularly. The brand is renowned for its luxury fashion offerings online, making it a key player in the digital retail landscape. They cater to tastes ranging from high street to haute couture, offering a wide variety of products. A data analysis has been conducted on Net-a-Porters **Ecommerce product-list page (PLP)** in Portugal. These PLP data sets offer crucial insights on product performance, customer preferences, and market trends, which are critical towards shaping the brand’s strategies in the competitive online fashion space.
## Link to **dataset**
[Portugal - Net-a-Porter - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Net-a-Porter%20Product-prices%20Portugal/r/recA0xr8F85lVPMgr)
| [
-0.5441004633903503,
-0.3890414237976074,
-0.27050212025642395,
0.7031242251396179,
-0.3057191073894501,
0.17558200657367706,
0.39546117186546326,
-0.7341078519821167,
0.6208510994911194,
0.46976345777511597,
-0.8204150199890137,
-0.6654409766197205,
-0.20300734043121338,
-0.07332935184240... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Prada.Product.prices.Singapore | DBQ | 2023-11-18T22:12:59Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T22:12:59Z | 2023-11-18T22:12:58.000Z | 2023-11-18T22:12:58 | # Prada web scraped data
## About the website
The **fashion industry** in the Asia Pacific, specifically in **Singapore**, continues to evolve and flourish. This growth is mainly driven by **luxury fashion brands** such as **Prada**. With the increasing accessibility due to digital platforms, many customers now are more inclined towards online shopping. Hence, Prada, like many other high-end retailers, has placed significant emphasis on its **Ecommerce strategy**. The brand is constantly aiming to improve the online shopping experience by enhancing product visibility and accessibility for its potential customers. The dataset observed contains **Ecommerce product-list page (PLP) data** on Prada in Singapore, which provides valuable insights into the performance and customer interaction with the digital platform.
## Link to **dataset**
[Singapore - Prada - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Prada%20Product-prices%20Singapore/r/recmztprvWS6saZ2U)
| [
-0.3035084307193756,
-0.42892563343048096,
-0.07250738143920898,
0.29732146859169006,
-0.7783701419830322,
0.22110044956207275,
0.04461922124028206,
-0.6591383218765259,
0.4647376239299774,
0.16540884971618652,
-0.8820317387580872,
-0.5953351259231567,
0.18799610435962677,
0.03087115660309... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Net.a.Porter.Product.prices.Italy | DBQ | 2023-11-18T22:14:35Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:14:35Z | 2023-11-18T22:14:27.000Z | 2023-11-18T22:14:27 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: Italy - Net-a-Porter - Product-level price list
tags:
- webscraping
- ecommerce
- Net
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Net-a-Porter
dtype: string
- name: '2023-11-08'
dtype: string
- name: ITA
dtype: string
- name: EUR
dtype: string
- name: SAINT LAURENT
dtype: string
- name: CLOTHING
dtype: string
- name: DRESSES
dtype: string
- name: MIDI DRESSES
dtype: string
- name: '1647597276844592'
dtype: int64
- name: Lace-trimmed silk-satin midi dress
dtype: string
- name: https://www.net-a-porter.com/it/en/shop/product/saint-laurent/clothing/midi-dresses/lace-trimmed-silk-satin-midi-dress/1647597276844592
dtype: string
- name: https://www.net-a-porter.com/variants/images/1647597276844592/ou/w1000.jpg
dtype: string
- name: '3490.00'
dtype: float64
- name: 3490.00.1
dtype: float64
- name: 3490.00.2
dtype: float64
- name: 3490.00.3
dtype: float64
- name: '0'
dtype: int64
splits:
- name: train
num_bytes: 17591923
num_examples: 43148
download_size: 5140785
dataset_size: 17591923
---
# Net-a-Porter web scraped data
## About the website
The **Ecommerce** industry in the EMEA region, particularly in **Italy**, has seen significant growth due to digital transformation and increased web shopping habits. Within this sector, the luxury fashion industry is a standout, where **Net-a-Porter** operates. This platform effectively fuses traditional haute couture and digital luxury shopping, offering an extensive range of high-end clothing and accessories. The observed dataset contains **Ecommerce product-list page (PLP)** data on Net-a-Porter in Italy, providing insight on user behavior, product attractiveness and market trends. This type of data enables customization and effectiveness of digital marketing strategies for the brand.
## Link to **dataset**
[Italy - Net-a-Porter - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Net-a-Porter%20Product-prices%20Italy/r/recAG83il5B3oEP18)
| [
-0.48711833357810974,
-0.49228671193122864,
-0.06190973520278931,
0.5596330761909485,
-0.2383631020784378,
0.10161828994750977,
0.4399326741695404,
-0.7013539671897888,
0.5931122899055481,
0.35668858885765076,
-0.9886167049407959,
-0.6918115019798279,
-0.0040105064399540424,
-0.01870880648... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Net.a.Porter.Product.prices.India | DBQ | 2023-11-18T22:14:49Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:14:49Z | 2023-11-18T22:14:41.000Z | 2023-11-18T22:14:41 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: India - Net-a-Porter - Product-level price list
tags:
- webscraping
- ecommerce
- Net
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Net-a-Porter
dtype: string
- name: '2023-11-08'
dtype: string
- name: IND
dtype: string
- name: USD
dtype: string
- name: KHAITE
dtype: string
- name: CLOTHING
dtype: string
- name: DRESSES
dtype: string
- name: MINI DRESSES
dtype: string
- name: '1647597303269708'
dtype: int64
- name: Janna strapless duchesse cotton-blend satin mini dress
dtype: string
- name: https://www.net-a-porter.com/in/en/shop/product/khaite/clothing/mini-dresses/janna-strapless-duchesse-cotton-blend-satin-mini-dress/1647597303269708
dtype: string
- name: https://www.net-a-porter.com/variants/images/1647597303269708/ou/w1000.jpg
dtype: string
- name: '2094.00'
dtype: float64
- name: '1047.00'
dtype: float64
- name: '1958.84'
dtype: float64
- name: '979.42'
dtype: float64
- name: '1'
dtype: int64
splits:
- name: train
num_bytes: 17965087
num_examples: 44420
download_size: 5762779
dataset_size: 17965087
---
# Net-a-Porter web scraped data
## About the website
In the Asia Pacific region, particularly India, retail industries are witnessing a significant digital transformation. The **Ecommerce industry** is particularly flourishing, revolutionised by advanced technology, the proliferation of smartphones, and improved internet infrastructure. The **online fashion retail** sector is a key player in this surge, making a strong foothold in the world of Ecommerce. **Net-a-Porter**, a premier luxury online fashion retailer operates within this industry. The dataset examined, provides **Ecommerce product-list page (PLP) data** on Net-a-Porters offerings in India, revealing significant insights on product range, consumer preferences, price points and trends in the Indian online luxury fashion retail landscape.
## Link to **dataset**
[India - Net-a-Porter - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Net-a-Porter%20Product-prices%20India/r/rec5aEBQ3a7hC1MIp)
| [
-0.3120414912700653,
-0.3068273961544037,
-0.2645328938961029,
0.6409540772438049,
-0.3861827552318573,
0.2839544415473938,
0.4640180170536041,
-0.8284400701522827,
0.37389886379241943,
0.26534563302993774,
-1.0525803565979004,
-0.3474869132041931,
0.020420849323272705,
0.12575899064540863... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Chloe.Product.prices.United.States | DBQ | 2023-11-18T22:14:59Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:14:59Z | 2023-11-18T22:14:54.000Z | 2023-11-18T22:14:54 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: United States - Chloe - Product-level price list
tags:
- webscraping
- ecommerce
- Chloe
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Chloe
dtype: string
- name: '2023-11-08'
dtype: string
- name: USA
dtype: string
- name: USD
dtype: string
- name: CHLOE
dtype: string
- name: WOMEN
dtype: string
- name: NEW ARRIVALS
dtype: string
- name: WINTER 2023
dtype: string
- name: 45800720AE
dtype: string
- name: Hana mini bag
dtype: string
- name: https://www.chloe.com/us/shoulder-bag_cod45800720ae.html
dtype: string
- name: https://www.chloe.com/product_image/45800720AE/f/w282.jpg
dtype: string
- name: '430.00'
dtype: float64
- name: 430.00.1
dtype: float64
- name: '402.24'
dtype: float64
- name: 402.24.1
dtype: float64
- name: '0'
dtype: int64
splits:
- name: train
num_bytes: 714750
num_examples: 2569
download_size: 162722
dataset_size: 714750
---
# Chloe web scraped data
## About the website
The **Ecommerce industry** in America, particularly in the United States, has been showing rapid growth and substantial advancements amidst digitalization and increased online shopping trends. One vital sector within this industry is the **fashion industry**, where renowned brands like **Chloe** record significant sales. The brand operates through both physical outlets and online platforms. The dataset observed has **Ecommerce product-list page (PLP) data on Chloe** in United States, providing insightful observations on their digital performance, customer preferences, and shopping behavior. Such data is instrumental to marketers and analysts in understanding market trends, designing strategies, and focusing on customer segments to increase revenue and profits.
## Link to **dataset**
[United States - Chloe - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Chloe%20Product-prices%20United%20States/r/recHvXlrm9VQZj0kM)
| [
-0.08423413336277008,
-0.3821715712547302,
0.10039941966533661,
0.5075275301933289,
-0.24123625457286835,
0.16061808168888092,
0.5209152102470398,
-0.6016657948493958,
0.4743955433368683,
0.24052484333515167,
-0.903451144695282,
-0.7928147315979004,
0.2729908525943756,
-0.29766738414764404... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Bottega.Veneta.Product.prices.Italy | DBQ | 2023-11-18T22:15:08Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:15:08Z | 2023-11-18T22:15:04.000Z | 2023-11-18T22:15:04 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: Italy - Bottega Veneta - Product-level price list
tags:
- webscraping
- ecommerce
- Bottega Veneta
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Bottega Veneta
dtype: string
- name: '2023-11-08'
dtype: string
- name: ITA
dtype: string
- name: EUR
dtype: string
- name: BOTTEGA VENETA
dtype: string
- name: N.A.
dtype: string
- name: N.A..1
dtype: string
- name: N.A..2
dtype: string
- name: 765857VMAY02145
dtype: string
- name: Portachiavi Intreccio con gancio a goccia
dtype: string
- name: https://www.bottegaveneta.com/it-it/portachiavi-intreccio-con-gancio-a-goccia-fondant-765857VMAY02145.html
dtype: string
- name: https://bottega-veneta.dam.kering.com/m/4351e9501f1673da/Thumbnail-765857VMAY02145_A.jpg?v=1
dtype: string
- name: '390.00'
dtype: float64
- name: 390.00.1
dtype: float64
- name: 390.00.2
dtype: float64
- name: 390.00.3
dtype: float64
- name: '0'
dtype: int64
splits:
- name: train
num_bytes: 1629316
num_examples: 4394
download_size: 467120
dataset_size: 1629316
---
# Bottega Veneta web scraped data
## About the website
**Bottega Veneta** operates in the luxury fashion industry within the EMEA region, specifically in **Italy**. This industry is recognized worldwide for its high-end, quality products that range from clothing, accessories to fragrances and home furnishings. Italy, in particular, holds a significant place in this market, given its long-standing reputation and excellence in the design and manufacturing of luxurious goods. With the steady increase in digital transformation, **Ecommerce** has become a crucial part of this sector, providing opportunities for brands like Bottega Veneta to reach global consumers more efficiently. The dataset observed contains **Ecommerce product-list page (PLP) data** on Bottega Veneta in Italy.
## Link to **dataset**
[Italy - Bottega Veneta - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Bottega%20Veneta%20Product-prices%20Italy/r/recoQt2LVCwv0kJbV)
| [
-0.22995175421237946,
-0.787541389465332,
0.15285199880599976,
0.1769132763147354,
-0.19479987025260925,
0.322195440530777,
0.1576649695634842,
-0.6116788387298584,
0.6124821901321411,
0.4006318151950836,
-0.9632405638694763,
-0.9003748893737793,
0.057501476258039474,
-0.042658496648073196... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Net.a.Porter.Product.prices.Hong.Kong | DBQ | 2023-11-18T22:15:26Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:15:26Z | 2023-11-18T22:15:16.000Z | 2023-11-18T22:15:16 | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: Hong Kong - Net-a-Porter - Product-level price list
tags:
- webscraping
- ecommerce
- Net
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Net-a-Porter
dtype: string
- name: '2023-11-08'
dtype: string
- name: HKG
dtype: string
- name: HKD
dtype: string
- name: GIANVITO ROSSI
dtype: string
- name: SHOES
dtype: string
- name: SANDALS
dtype: string
- name: FLAT
dtype: string
- name: '10163292707857611'
dtype: int64
- name: Portofino 20 suede sandals
dtype: string
- name: https://www.net-a-porter.com/hk/en/shop/product/gianvito-rossi/shoes/flat/portofino-20-suede-sandals/10163292707857611
dtype: string
- name: https://www.net-a-porter.com/variants/images/10163292707857611/ou/w1000.jpg
dtype: string
- name: '6900.00'
dtype: float64
- name: 6900.00.1
dtype: float64
- name: '825.16'
dtype: float64
- name: 825.16.1
dtype: float64
- name: '0'
dtype: int64
splits:
- name: train
num_bytes: 21075726
num_examples: 51659
download_size: 6495108
dataset_size: 21075726
---
# Net-a-Porter web scraped data
## About the website
The **Ecommerce** industry in the **Asia Pacific**, particularly in **Hong Kong**, has shown significant growth with a rising number of online shoppers. Companies, such as **Net-a-Porter**, have been thriving in this robust market, offering luxury fashion and accessories through their digital platforms. Hong Kongs high internet penetration and mobile device usage rates provide an excellent environment for Ecommerce businesses. Additionally, its consumers high purchasing power and desire for luxury brands have made this region a vital location for digital retail. The dataset observed encompassed the **Ecommerce product-list page (PLP) data** of Net-a-Porters operation in **Hong Kong**.
## Link to **dataset**
[Hong Kong - Net-a-Porter - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Net-a-Porter%20Product-prices%20Hong%20Kong/r/recLMteOOeDjt4LbK)
| [
-0.3662470579147339,
-0.44926872849464417,
-0.11996858566999435,
0.7440378665924072,
-0.3671751618385315,
0.14093340933322906,
0.47461944818496704,
-0.6374553442001343,
0.40561461448669434,
0.5556116700172424,
-0.7467550039291382,
-0.571694552898407,
0.026920095086097717,
0.061629295349121... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
faizalnf1800/scifi-webnovel | faizalnf1800 | 2023-11-20T00:33:45Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-20T00:33:45Z | 2023-11-18T22:18:26.000Z | 2023-11-18T22:18:26 | ---
license: mit
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Arturex2/RECURSOSHUMANOS_UNSAAC | Arturex2 | 2023-11-18T22:21:39Z | 0 | 0 | null | [
"license:apache-2.0",
"region:us"
] | 2023-11-18T22:21:39Z | 2023-11-18T22:21:39.000Z | 2023-11-18T22:21:39 | ---
license: apache-2.0
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-1.3b-instruct_public | open-llm-leaderboard | 2023-11-18T22:28:08Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T22:28:08Z | 2023-11-18T22:27:19.000Z | 2023-11-18T22:27:19 | ---
pretty_name: Evaluation run of AI-Sweden-Models/gpt-sw3-1.3b-instruct
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [AI-Sweden-Models/gpt-sw3-1.3b-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-1.3b-instruct)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-1.3b-instruct_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T22:24:58.034031](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-1.3b-instruct_public/blob/main/results_2023-11-18T22-24-58.034031.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26508562345540565,\n\
\ \"acc_stderr\": 0.031234722319638815,\n \"acc_norm\": 0.26674815683597625,\n\
\ \"acc_norm_stderr\": 0.03200950359205627,\n \"mc1\": 0.23990208078335373,\n\
\ \"mc1_stderr\": 0.014948812679062133,\n \"mc2\": 0.4031210627490525,\n\
\ \"mc2_stderr\": 0.014405145562379922,\n \"em\": 0.0003145973154362416,\n\
\ \"em_stderr\": 0.0001816137946883977,\n \"f1\": 0.04640520134228205,\n\
\ \"f1_stderr\": 0.001234443866638647\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.2713310580204778,\n \"acc_stderr\": 0.012993807727545797,\n\
\ \"acc_norm\": 0.3097269624573379,\n \"acc_norm_stderr\": 0.01351205841523836\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.40131447918741286,\n\
\ \"acc_stderr\": 0.0048916267180972575,\n \"acc_norm\": 0.5142401911969727,\n\
\ \"acc_norm_stderr\": 0.004987757314769834\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.28888888888888886,\n\
\ \"acc_stderr\": 0.03915450630414251,\n \"acc_norm\": 0.28888888888888886,\n\
\ \"acc_norm_stderr\": 0.03915450630414251\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.29605263157894735,\n \"acc_stderr\": 0.037150621549989056,\n\
\ \"acc_norm\": 0.29605263157894735,\n \"acc_norm_stderr\": 0.037150621549989056\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.31,\n\
\ \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \
\ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.23018867924528302,\n \"acc_stderr\": 0.02590789712240817,\n\
\ \"acc_norm\": 0.23018867924528302,\n \"acc_norm_stderr\": 0.02590789712240817\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2847222222222222,\n\
\ \"acc_stderr\": 0.03773809990686933,\n \"acc_norm\": 0.2847222222222222,\n\
\ \"acc_norm_stderr\": 0.03773809990686933\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036623,\n \
\ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n\
\ \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2774566473988439,\n\
\ \"acc_stderr\": 0.034140140070440354,\n \"acc_norm\": 0.2774566473988439,\n\
\ \"acc_norm_stderr\": 0.034140140070440354\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.043364327079931785,\n\
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.043364327079931785\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n\
\ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.2936170212765957,\n \"acc_stderr\": 0.02977164271249123,\n\
\ \"acc_norm\": 0.2936170212765957,\n \"acc_norm_stderr\": 0.02977164271249123\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.19298245614035087,\n\
\ \"acc_stderr\": 0.037124548537213684,\n \"acc_norm\": 0.19298245614035087,\n\
\ \"acc_norm_stderr\": 0.037124548537213684\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.33793103448275863,\n \"acc_stderr\": 0.039417076320648906,\n\
\ \"acc_norm\": 0.33793103448275863,\n \"acc_norm_stderr\": 0.039417076320648906\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2724867724867725,\n \"acc_stderr\": 0.022930973071633345,\n \"\
acc_norm\": 0.2724867724867725,\n \"acc_norm_stderr\": 0.022930973071633345\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.19047619047619047,\n\
\ \"acc_stderr\": 0.03512207412302054,\n \"acc_norm\": 0.19047619047619047,\n\
\ \"acc_norm_stderr\": 0.03512207412302054\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24838709677419354,\n\
\ \"acc_stderr\": 0.024580028921481003,\n \"acc_norm\": 0.24838709677419354,\n\
\ \"acc_norm_stderr\": 0.024580028921481003\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.2561576354679803,\n \"acc_stderr\": 0.0307127300709826,\n\
\ \"acc_norm\": 0.2561576354679803,\n \"acc_norm_stderr\": 0.0307127300709826\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\"\
: 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.03427743175816524,\n\
\ \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.03427743175816524\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.20707070707070707,\n \"acc_stderr\": 0.028869778460267035,\n \"\
acc_norm\": 0.20707070707070707,\n \"acc_norm_stderr\": 0.028869778460267035\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.21243523316062177,\n \"acc_stderr\": 0.029519282616817244,\n\
\ \"acc_norm\": 0.21243523316062177,\n \"acc_norm_stderr\": 0.029519282616817244\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.24102564102564103,\n \"acc_stderr\": 0.02168554666533319,\n\
\ \"acc_norm\": 0.24102564102564103,\n \"acc_norm_stderr\": 0.02168554666533319\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712163,\n \
\ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712163\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.18067226890756302,\n \"acc_stderr\": 0.024991964966600753,\n\
\ \"acc_norm\": 0.18067226890756302,\n \"acc_norm_stderr\": 0.024991964966600753\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.25165562913907286,\n \"acc_stderr\": 0.03543304234389985,\n \"\
acc_norm\": 0.25165562913907286,\n \"acc_norm_stderr\": 0.03543304234389985\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.21467889908256882,\n \"acc_stderr\": 0.017604304149256487,\n \"\
acc_norm\": 0.21467889908256882,\n \"acc_norm_stderr\": 0.017604304149256487\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.1574074074074074,\n \"acc_stderr\": 0.02483717351824239,\n \"\
acc_norm\": 0.1574074074074074,\n \"acc_norm_stderr\": 0.02483717351824239\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604243,\n \"\
acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604243\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.2869198312236287,\n \"acc_stderr\": 0.02944377302259469,\n \
\ \"acc_norm\": 0.2869198312236287,\n \"acc_norm_stderr\": 0.02944377302259469\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.33183856502242154,\n\
\ \"acc_stderr\": 0.03160295143776679,\n \"acc_norm\": 0.33183856502242154,\n\
\ \"acc_norm_stderr\": 0.03160295143776679\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.03641297081313729,\n\
\ \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.03641297081313729\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.3305785123966942,\n \"acc_stderr\": 0.04294340845212095,\n \"\
acc_norm\": 0.3305785123966942,\n \"acc_norm_stderr\": 0.04294340845212095\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.26851851851851855,\n\
\ \"acc_stderr\": 0.04284467968052191,\n \"acc_norm\": 0.26851851851851855,\n\
\ \"acc_norm_stderr\": 0.04284467968052191\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.2822085889570552,\n \"acc_stderr\": 0.03536117886664743,\n\
\ \"acc_norm\": 0.2822085889570552,\n \"acc_norm_stderr\": 0.03536117886664743\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n\
\ \"acc_stderr\": 0.043270409325787296,\n \"acc_norm\": 0.29464285714285715,\n\
\ \"acc_norm_stderr\": 0.043270409325787296\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.23300970873786409,\n \"acc_stderr\": 0.04185832598928315,\n\
\ \"acc_norm\": 0.23300970873786409,\n \"acc_norm_stderr\": 0.04185832598928315\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2863247863247863,\n\
\ \"acc_stderr\": 0.02961432369045665,\n \"acc_norm\": 0.2863247863247863,\n\
\ \"acc_norm_stderr\": 0.02961432369045665\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.29757343550446996,\n\
\ \"acc_stderr\": 0.016349111912909428,\n \"acc_norm\": 0.29757343550446996,\n\
\ \"acc_norm_stderr\": 0.016349111912909428\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.2861271676300578,\n \"acc_stderr\": 0.02433214677913413,\n\
\ \"acc_norm\": 0.2861271676300578,\n \"acc_norm_stderr\": 0.02433214677913413\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24804469273743016,\n\
\ \"acc_stderr\": 0.014444157808261448,\n \"acc_norm\": 0.24804469273743016,\n\
\ \"acc_norm_stderr\": 0.014444157808261448\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.22875816993464052,\n \"acc_stderr\": 0.024051029739912255,\n\
\ \"acc_norm\": 0.22875816993464052,\n \"acc_norm_stderr\": 0.024051029739912255\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.27009646302250806,\n\
\ \"acc_stderr\": 0.025218040373410616,\n \"acc_norm\": 0.27009646302250806,\n\
\ \"acc_norm_stderr\": 0.025218040373410616\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.2654320987654321,\n \"acc_stderr\": 0.024569223600460845,\n\
\ \"acc_norm\": 0.2654320987654321,\n \"acc_norm_stderr\": 0.024569223600460845\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.2624113475177305,\n \"acc_stderr\": 0.026244920349843014,\n \
\ \"acc_norm\": 0.2624113475177305,\n \"acc_norm_stderr\": 0.026244920349843014\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2607561929595828,\n\
\ \"acc_stderr\": 0.011213471559602325,\n \"acc_norm\": 0.2607561929595828,\n\
\ \"acc_norm_stderr\": 0.011213471559602325\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.26838235294117646,\n \"acc_stderr\": 0.026917481224377232,\n\
\ \"acc_norm\": 0.26838235294117646,\n \"acc_norm_stderr\": 0.026917481224377232\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.29901960784313725,\n \"acc_stderr\": 0.018521756215423024,\n \
\ \"acc_norm\": 0.29901960784313725,\n \"acc_norm_stderr\": 0.018521756215423024\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.32727272727272727,\n\
\ \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.32727272727272727,\n\
\ \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.19183673469387755,\n \"acc_stderr\": 0.025206963154225406,\n\
\ \"acc_norm\": 0.19183673469387755,\n \"acc_norm_stderr\": 0.025206963154225406\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2537313432835821,\n\
\ \"acc_stderr\": 0.03076944496729602,\n \"acc_norm\": 0.2537313432835821,\n\
\ \"acc_norm_stderr\": 0.03076944496729602\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.27710843373493976,\n\
\ \"acc_stderr\": 0.03484331592680589,\n \"acc_norm\": 0.27710843373493976,\n\
\ \"acc_norm_stderr\": 0.03484331592680589\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.2982456140350877,\n \"acc_stderr\": 0.03508771929824565,\n\
\ \"acc_norm\": 0.2982456140350877,\n \"acc_norm_stderr\": 0.03508771929824565\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23990208078335373,\n\
\ \"mc1_stderr\": 0.014948812679062133,\n \"mc2\": 0.4031210627490525,\n\
\ \"mc2_stderr\": 0.014405145562379922\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.5674822415153907,\n \"acc_stderr\": 0.013923911578623839\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.0003145973154362416,\n \
\ \"em_stderr\": 0.0001816137946883977,\n \"f1\": 0.04640520134228205,\n\
\ \"f1_stderr\": 0.001234443866638647\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.01592115238817286,\n \"acc_stderr\": 0.003447819272389002\n\
\ }\n}\n```"
repo_url: https://huggingface.co/AI-Sweden-Models/gpt-sw3-1.3b-instruct
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|arc:challenge|25_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|drop|3_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|gsm8k|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hellaswag|10_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T22-24-58.034031.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T22-24-58.034031.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- '**/details_harness|winogrande|5_2023-11-18T22-24-58.034031.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T22-24-58.034031.parquet'
- config_name: results
data_files:
- split: 2023_11_18T22_24_58.034031
path:
- results_2023-11-18T22-24-58.034031.parquet
- split: latest
path:
- results_2023-11-18T22-24-58.034031.parquet
---
# Dataset Card for Evaluation run of AI-Sweden-Models/gpt-sw3-1.3b-instruct
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/AI-Sweden-Models/gpt-sw3-1.3b-instruct
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [AI-Sweden-Models/gpt-sw3-1.3b-instruct](https://huggingface.co/AI-Sweden-Models/gpt-sw3-1.3b-instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-1.3b-instruct_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T22:24:58.034031](https://huggingface.co/datasets/open-llm-leaderboard/details_AI-Sweden-Models__gpt-sw3-1.3b-instruct_public/blob/main/results_2023-11-18T22-24-58.034031.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.26508562345540565,
"acc_stderr": 0.031234722319638815,
"acc_norm": 0.26674815683597625,
"acc_norm_stderr": 0.03200950359205627,
"mc1": 0.23990208078335373,
"mc1_stderr": 0.014948812679062133,
"mc2": 0.4031210627490525,
"mc2_stderr": 0.014405145562379922,
"em": 0.0003145973154362416,
"em_stderr": 0.0001816137946883977,
"f1": 0.04640520134228205,
"f1_stderr": 0.001234443866638647
},
"harness|arc:challenge|25": {
"acc": 0.2713310580204778,
"acc_stderr": 0.012993807727545797,
"acc_norm": 0.3097269624573379,
"acc_norm_stderr": 0.01351205841523836
},
"harness|hellaswag|10": {
"acc": 0.40131447918741286,
"acc_stderr": 0.0048916267180972575,
"acc_norm": 0.5142401911969727,
"acc_norm_stderr": 0.004987757314769834
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909282
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.28888888888888886,
"acc_stderr": 0.03915450630414251,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.03915450630414251
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.29605263157894735,
"acc_stderr": 0.037150621549989056,
"acc_norm": 0.29605263157894735,
"acc_norm_stderr": 0.037150621549989056
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.23018867924528302,
"acc_stderr": 0.02590789712240817,
"acc_norm": 0.23018867924528302,
"acc_norm_stderr": 0.02590789712240817
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2847222222222222,
"acc_stderr": 0.03773809990686933,
"acc_norm": 0.2847222222222222,
"acc_norm_stderr": 0.03773809990686933
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.19,
"acc_stderr": 0.03942772444036623,
"acc_norm": 0.19,
"acc_norm_stderr": 0.03942772444036623
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.2774566473988439,
"acc_stderr": 0.034140140070440354,
"acc_norm": 0.2774566473988439,
"acc_norm_stderr": 0.034140140070440354
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.043364327079931785,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.29,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.29,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.2936170212765957,
"acc_stderr": 0.02977164271249123,
"acc_norm": 0.2936170212765957,
"acc_norm_stderr": 0.02977164271249123
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.19298245614035087,
"acc_stderr": 0.037124548537213684,
"acc_norm": 0.19298245614035087,
"acc_norm_stderr": 0.037124548537213684
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.33793103448275863,
"acc_stderr": 0.039417076320648906,
"acc_norm": 0.33793103448275863,
"acc_norm_stderr": 0.039417076320648906
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2724867724867725,
"acc_stderr": 0.022930973071633345,
"acc_norm": 0.2724867724867725,
"acc_norm_stderr": 0.022930973071633345
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.19047619047619047,
"acc_stderr": 0.03512207412302054,
"acc_norm": 0.19047619047619047,
"acc_norm_stderr": 0.03512207412302054
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.24838709677419354,
"acc_stderr": 0.024580028921481003,
"acc_norm": 0.24838709677419354,
"acc_norm_stderr": 0.024580028921481003
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.2561576354679803,
"acc_stderr": 0.0307127300709826,
"acc_norm": 0.2561576354679803,
"acc_norm_stderr": 0.0307127300709826
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.2606060606060606,
"acc_stderr": 0.03427743175816524,
"acc_norm": 0.2606060606060606,
"acc_norm_stderr": 0.03427743175816524
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.20707070707070707,
"acc_stderr": 0.028869778460267035,
"acc_norm": 0.20707070707070707,
"acc_norm_stderr": 0.028869778460267035
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.21243523316062177,
"acc_stderr": 0.029519282616817244,
"acc_norm": 0.21243523316062177,
"acc_norm_stderr": 0.029519282616817244
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.24102564102564103,
"acc_stderr": 0.02168554666533319,
"acc_norm": 0.24102564102564103,
"acc_norm_stderr": 0.02168554666533319
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.25925925925925924,
"acc_stderr": 0.026719240783712163,
"acc_norm": 0.25925925925925924,
"acc_norm_stderr": 0.026719240783712163
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.18067226890756302,
"acc_stderr": 0.024991964966600753,
"acc_norm": 0.18067226890756302,
"acc_norm_stderr": 0.024991964966600753
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.25165562913907286,
"acc_stderr": 0.03543304234389985,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.03543304234389985
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.21467889908256882,
"acc_stderr": 0.017604304149256487,
"acc_norm": 0.21467889908256882,
"acc_norm_stderr": 0.017604304149256487
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.1574074074074074,
"acc_stderr": 0.02483717351824239,
"acc_norm": 0.1574074074074074,
"acc_norm_stderr": 0.02483717351824239
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.030587591351604243,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.030587591351604243
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.2869198312236287,
"acc_stderr": 0.02944377302259469,
"acc_norm": 0.2869198312236287,
"acc_norm_stderr": 0.02944377302259469
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.33183856502242154,
"acc_stderr": 0.03160295143776679,
"acc_norm": 0.33183856502242154,
"acc_norm_stderr": 0.03160295143776679
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.22137404580152673,
"acc_stderr": 0.03641297081313729,
"acc_norm": 0.22137404580152673,
"acc_norm_stderr": 0.03641297081313729
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.3305785123966942,
"acc_stderr": 0.04294340845212095,
"acc_norm": 0.3305785123966942,
"acc_norm_stderr": 0.04294340845212095
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.26851851851851855,
"acc_stderr": 0.04284467968052191,
"acc_norm": 0.26851851851851855,
"acc_norm_stderr": 0.04284467968052191
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.2822085889570552,
"acc_stderr": 0.03536117886664743,
"acc_norm": 0.2822085889570552,
"acc_norm_stderr": 0.03536117886664743
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.29464285714285715,
"acc_stderr": 0.043270409325787296,
"acc_norm": 0.29464285714285715,
"acc_norm_stderr": 0.043270409325787296
},
"harness|hendrycksTest-management|5": {
"acc": 0.23300970873786409,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.23300970873786409,
"acc_norm_stderr": 0.04185832598928315
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.2863247863247863,
"acc_stderr": 0.02961432369045665,
"acc_norm": 0.2863247863247863,
"acc_norm_stderr": 0.02961432369045665
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.26,
"acc_stderr": 0.044084400227680794,
"acc_norm": 0.26,
"acc_norm_stderr": 0.044084400227680794
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.29757343550446996,
"acc_stderr": 0.016349111912909428,
"acc_norm": 0.29757343550446996,
"acc_norm_stderr": 0.016349111912909428
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.2861271676300578,
"acc_stderr": 0.02433214677913413,
"acc_norm": 0.2861271676300578,
"acc_norm_stderr": 0.02433214677913413
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24804469273743016,
"acc_stderr": 0.014444157808261448,
"acc_norm": 0.24804469273743016,
"acc_norm_stderr": 0.014444157808261448
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.22875816993464052,
"acc_stderr": 0.024051029739912255,
"acc_norm": 0.22875816993464052,
"acc_norm_stderr": 0.024051029739912255
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.27009646302250806,
"acc_stderr": 0.025218040373410616,
"acc_norm": 0.27009646302250806,
"acc_norm_stderr": 0.025218040373410616
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.2654320987654321,
"acc_stderr": 0.024569223600460845,
"acc_norm": 0.2654320987654321,
"acc_norm_stderr": 0.024569223600460845
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2624113475177305,
"acc_stderr": 0.026244920349843014,
"acc_norm": 0.2624113475177305,
"acc_norm_stderr": 0.026244920349843014
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2607561929595828,
"acc_stderr": 0.011213471559602325,
"acc_norm": 0.2607561929595828,
"acc_norm_stderr": 0.011213471559602325
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.26838235294117646,
"acc_stderr": 0.026917481224377232,
"acc_norm": 0.26838235294117646,
"acc_norm_stderr": 0.026917481224377232
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.29901960784313725,
"acc_stderr": 0.018521756215423024,
"acc_norm": 0.29901960784313725,
"acc_norm_stderr": 0.018521756215423024
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.32727272727272727,
"acc_stderr": 0.04494290866252089,
"acc_norm": 0.32727272727272727,
"acc_norm_stderr": 0.04494290866252089
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.19183673469387755,
"acc_stderr": 0.025206963154225406,
"acc_norm": 0.19183673469387755,
"acc_norm_stderr": 0.025206963154225406
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.2537313432835821,
"acc_stderr": 0.03076944496729602,
"acc_norm": 0.2537313432835821,
"acc_norm_stderr": 0.03076944496729602
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-virology|5": {
"acc": 0.27710843373493976,
"acc_stderr": 0.03484331592680589,
"acc_norm": 0.27710843373493976,
"acc_norm_stderr": 0.03484331592680589
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.03508771929824565,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.03508771929824565
},
"harness|truthfulqa:mc|0": {
"mc1": 0.23990208078335373,
"mc1_stderr": 0.014948812679062133,
"mc2": 0.4031210627490525,
"mc2_stderr": 0.014405145562379922
},
"harness|winogrande|5": {
"acc": 0.5674822415153907,
"acc_stderr": 0.013923911578623839
},
"harness|drop|3": {
"em": 0.0003145973154362416,
"em_stderr": 0.0001816137946883977,
"f1": 0.04640520134228205,
"f1_stderr": 0.001234443866638647
},
"harness|gsm8k|5": {
"acc": 0.01592115238817286,
"acc_stderr": 0.003447819272389002
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.7085692286491394,
-0.8378478288650513,
0.32288697361946106,
0.20215995609760284,
-0.2001490592956543,
-0.07462755590677261,
0.03806689381599426,
-0.2464774250984192,
0.5075173377990723,
-0.048685528337955475,
-0.4634369909763336,
-0.707817792892456,
-0.4462711215019226,
0.22932726144790... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Net.a.Porter.Product.prices.Spain | DBQ | 2023-11-18T22:28:37Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-18T22:28:37Z | 2023-11-18T22:28:37.000Z | 2023-11-18T22:28:37 | ---
annotations_creators:
- other
language:
- en
language_creators:
- other
license:
- unknown
multilinguality:
- monolingual
pretty_name: 'Spain - Net-a-Porter - Product-level price list'
source_datasets:
- original
tags:
- webscraping
- ecommerce
- 'Net'
- fashion
- fashion product
- image
- fashion image
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
---
# Net-a-Porter web scraped data
## About the website
Net-a-Porter participates in the growing **ecommerce industry** in the **EMEA** region, particularly in **Spain**. The **fashion ecommerce industry** of Spain is expanding rapidly as digital technology becomes increasingly integrated in daily life. Many consumers are turning to online shopping for ease and convenience, and businesses are capitalizing on this trend. The provided dataset includes **Ecommerce product-list page (PLP) data** on Net-a-Porter in Spain, offering valuable insights to guide strategies within the online retail sector. In particular, it can help understand customer browsing behavior, preferences and purchasing patterns, crucial for content and product portfolio planning.
## Link to **dataset**
[Spain - Net-a-Porter - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Net-a-Porter%20Product-prices%20Spain/r/recdTadGg6KS4lFMV)
| [
-0.446653813123703,
-0.29308196902275085,
-0.062132880091667175,
1.0190584659576416,
-0.2133072465658188,
0.1892421990633011,
0.4466879963874817,
-0.7834409475326538,
0.5648192167282104,
0.4021744728088379,
-1.0369691848754883,
-0.6164177656173706,
0.0354943610727787,
-0.05312391743063927,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
ParisNeo/Word_in_Sentence_Database | ParisNeo | 2023-11-19T00:13:58Z | 0 | 2 | null | [
"task_categories:table-question-answering",
"language:en",
"license:apache-2.0",
"region:us"
] | 2023-11-19T00:13:58Z | 2023-11-18T22:31:02.000Z | 2023-11-18T22:31:02 | ---
license: apache-2.0
task_categories:
- table-question-answering
language:
- en
pretty_name: Word in Sentence database
---
# WIS database
This database contains a question answer list about text
This database was built using my this workflow:
1- load a raw text file
2- split into paragraphs
3- split paragraphs into sentences
4- for each word, ask question about its position and answer with the position, then ask about the word length and answer with the actual length of the word
5- ask a question about the number of words in the sentence and answer it
6- build a json database using this.
To do this, I kindly got the concent of Keith Curtis to use his website content as fuel to this algorithm.
The website can be found here:
https://keithcu.com/wordpress/?page_id=599
Best regards. | [
-0.5631294250488281,
-1.0793215036392212,
0.7588300704956055,
0.10790301859378815,
-0.24136875569820404,
0.05624087154865265,
-0.035891711711883545,
-0.25778260827064514,
0.5290417075157166,
0.6965683102607727,
-0.7639268636703491,
-0.35734546184539795,
-0.29419180750846863,
0.682788550853... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
ShubhamChoksi/IMDB_Movies | ShubhamChoksi | 2023-11-18T22:54:52Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-18T22:54:52Z | 2023-11-18T22:52:57.000Z | 2023-11-18T22:52:57 | ---
license: mit
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
open-llm-leaderboard/details_maywell__Synatra-7B-v0.3-RP_public | open-llm-leaderboard | 2023-11-18T23:06:17Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T23:06:17Z | 2023-11-18T23:05:30.000Z | 2023-11-18T23:05:30 | ---
pretty_name: Evaluation run of maywell/Synatra-7B-v0.3-RP
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [maywell/Synatra-7B-v0.3-RP](https://huggingface.co/maywell/Synatra-7B-v0.3-RP)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_maywell__Synatra-7B-v0.3-RP_public\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-11-18T23:02:29.150817](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-7B-v0.3-RP_public/blob/main/results_2023-11-18T23-02-29.150817.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6042478184701645,\n\
\ \"acc_stderr\": 0.03267991789724199,\n \"acc_norm\": 0.6118798356357696,\n\
\ \"acc_norm_stderr\": 0.03337492967666177,\n \"mc1\": 0.37209302325581395,\n\
\ \"mc1_stderr\": 0.016921090118814035,\n \"mc2\": 0.5263791321103062,\n\
\ \"mc2_stderr\": 0.015312628675104242,\n \"em\": 0.3953439597315436,\n\
\ \"em_stderr\": 0.005007043944789993,\n \"f1\": 0.46059983221476697,\n\
\ \"f1_stderr\": 0.00481810685968407\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5930034129692833,\n \"acc_stderr\": 0.014356399418009117,\n\
\ \"acc_norm\": 0.6220136518771331,\n \"acc_norm_stderr\": 0.014169664520303098\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6338378809002191,\n\
\ \"acc_stderr\": 0.004807699539973411,\n \"acc_norm\": 0.8229436367257519,\n\
\ \"acc_norm_stderr\": 0.003809362761248109\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n\
\ \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n\
\ \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n\
\ \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\
\ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6415094339622641,\n \"acc_stderr\": 0.029514703583981765,\n\
\ \"acc_norm\": 0.6415094339622641,\n \"acc_norm_stderr\": 0.029514703583981765\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n\
\ \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n\
\ \"acc_norm_stderr\": 0.039621355734862175\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \
\ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\"\
: 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5664739884393064,\n\
\ \"acc_stderr\": 0.03778621079092056,\n \"acc_norm\": 0.5664739884393064,\n\
\ \"acc_norm_stderr\": 0.03778621079092056\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n\
\ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n\
\ \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n\
\ \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\
\ \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n\
\ \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5241379310344828,\n \"acc_stderr\": 0.0416180850350153,\n\
\ \"acc_norm\": 0.5241379310344828,\n \"acc_norm_stderr\": 0.0416180850350153\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4497354497354497,\n \"acc_stderr\": 0.02562085704293665,\n \"\
acc_norm\": 0.4497354497354497,\n \"acc_norm_stderr\": 0.02562085704293665\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n\
\ \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n\
\ \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7354838709677419,\n \"acc_stderr\": 0.02509189237885928,\n \"\
acc_norm\": 0.7354838709677419,\n \"acc_norm_stderr\": 0.02509189237885928\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"\
acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\"\
: 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n\
\ \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"\
acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.02578772318072387,\n\
\ \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072387\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6,\n \"acc_stderr\": 0.02483881198803316,\n \"acc_norm\"\
: 0.6,\n \"acc_norm_stderr\": 0.02483881198803316\n },\n \"harness|hendrycksTest-high_school_mathematics|5\"\
: {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n\
\ \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \
\ \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3841059602649007,\n \"acc_stderr\": 0.03971301814719197,\n \"\
acc_norm\": 0.3841059602649007,\n \"acc_norm_stderr\": 0.03971301814719197\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7944954128440367,\n \"acc_stderr\": 0.017324352325016022,\n \"\
acc_norm\": 0.7944954128440367,\n \"acc_norm_stderr\": 0.017324352325016022\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\
: 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\
\ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8088235294117647,\n\
\ \"acc_stderr\": 0.02759917430064077,\n \"acc_norm\": 0.8088235294117647,\n\
\ \"acc_norm_stderr\": 0.02759917430064077\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n\
\ \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n\
\ \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n\
\ \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.03980066246467766,\n\
\ \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.03980066246467766\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"\
acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n\
\ \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n\
\ \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.034878251684978906,\n\
\ \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.034878251684978906\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n\
\ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n\
\ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.041858325989283136,\n\
\ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.041858325989283136\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n\
\ \"acc_stderr\": 0.02363687331748928,\n \"acc_norm\": 0.8461538461538461,\n\
\ \"acc_norm_stderr\": 0.02363687331748928\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \
\ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8173690932311622,\n\
\ \"acc_stderr\": 0.013816335389973145,\n \"acc_norm\": 0.8173690932311622,\n\
\ \"acc_norm_stderr\": 0.013816335389973145\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.02541600377316554,\n\
\ \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.02541600377316554\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2346368715083799,\n\
\ \"acc_stderr\": 0.014173044098303654,\n \"acc_norm\": 0.2346368715083799,\n\
\ \"acc_norm_stderr\": 0.014173044098303654\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.026716118380156847,\n\
\ \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.026716118380156847\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6913183279742765,\n\
\ \"acc_stderr\": 0.026236965881153273,\n \"acc_norm\": 0.6913183279742765,\n\
\ \"acc_norm_stderr\": 0.026236965881153273\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.02517104191530968,\n\
\ \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.02517104191530968\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \
\ \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.455019556714472,\n\
\ \"acc_stderr\": 0.012718456618701768,\n \"acc_norm\": 0.455019556714472,\n\
\ \"acc_norm_stderr\": 0.012718456618701768\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n\
\ \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6339869281045751,\n \"acc_stderr\": 0.019488025745529675,\n \
\ \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.019488025745529675\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6775510204081633,\n \"acc_stderr\": 0.029923100563683903,\n\
\ \"acc_norm\": 0.6775510204081633,\n \"acc_norm_stderr\": 0.029923100563683903\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n\
\ \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n\
\ \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \
\ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n\
\ \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n\
\ \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n\
\ \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37209302325581395,\n\
\ \"mc1_stderr\": 0.016921090118814035,\n \"mc2\": 0.5263791321103062,\n\
\ \"mc2_stderr\": 0.015312628675104242\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7647987371744278,\n \"acc_stderr\": 0.01192000816365087\n\
\ },\n \"harness|drop|3\": {\n \"em\": 0.3953439597315436,\n \
\ \"em_stderr\": 0.005007043944789993,\n \"f1\": 0.46059983221476697,\n\
\ \"f1_stderr\": 0.00481810685968407\n },\n \"harness|gsm8k|5\": {\n\
\ \"acc\": 0.21152388172858225,\n \"acc_stderr\": 0.01124906096863505\n\
\ }\n}\n```"
repo_url: https://huggingface.co/maywell/Synatra-7B-v0.3-RP
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|arc:challenge|25_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|drop|3_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|gsm8k|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hellaswag|10_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T23-02-29.150817.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-11-18T23-02-29.150817.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- '**/details_harness|winogrande|5_2023-11-18T23-02-29.150817.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-11-18T23-02-29.150817.parquet'
- config_name: results
data_files:
- split: 2023_11_18T23_02_29.150817
path:
- results_2023-11-18T23-02-29.150817.parquet
- split: latest
path:
- results_2023-11-18T23-02-29.150817.parquet
---
# Dataset Card for Evaluation run of maywell/Synatra-7B-v0.3-RP
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/maywell/Synatra-7B-v0.3-RP
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [maywell/Synatra-7B-v0.3-RP](https://huggingface.co/maywell/Synatra-7B-v0.3-RP) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_maywell__Synatra-7B-v0.3-RP_public",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-11-18T23:02:29.150817](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-7B-v0.3-RP_public/blob/main/results_2023-11-18T23-02-29.150817.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6042478184701645,
"acc_stderr": 0.03267991789724199,
"acc_norm": 0.6118798356357696,
"acc_norm_stderr": 0.03337492967666177,
"mc1": 0.37209302325581395,
"mc1_stderr": 0.016921090118814035,
"mc2": 0.5263791321103062,
"mc2_stderr": 0.015312628675104242,
"em": 0.3953439597315436,
"em_stderr": 0.005007043944789993,
"f1": 0.46059983221476697,
"f1_stderr": 0.00481810685968407
},
"harness|arc:challenge|25": {
"acc": 0.5930034129692833,
"acc_stderr": 0.014356399418009117,
"acc_norm": 0.6220136518771331,
"acc_norm_stderr": 0.014169664520303098
},
"harness|hellaswag|10": {
"acc": 0.6338378809002191,
"acc_stderr": 0.004807699539973411,
"acc_norm": 0.8229436367257519,
"acc_norm_stderr": 0.003809362761248109
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.562962962962963,
"acc_stderr": 0.04284958639753401,
"acc_norm": 0.562962962962963,
"acc_norm_stderr": 0.04284958639753401
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6578947368421053,
"acc_stderr": 0.03860731599316092,
"acc_norm": 0.6578947368421053,
"acc_norm_stderr": 0.03860731599316092
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6415094339622641,
"acc_stderr": 0.029514703583981765,
"acc_norm": 0.6415094339622641,
"acc_norm_stderr": 0.029514703583981765
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6597222222222222,
"acc_stderr": 0.039621355734862175,
"acc_norm": 0.6597222222222222,
"acc_norm_stderr": 0.039621355734862175
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5664739884393064,
"acc_stderr": 0.03778621079092056,
"acc_norm": 0.5664739884393064,
"acc_norm_stderr": 0.03778621079092056
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.042801058373643966,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.042801058373643966
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816507,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816507
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5106382978723404,
"acc_stderr": 0.03267862331014063,
"acc_norm": 0.5106382978723404,
"acc_norm_stderr": 0.03267862331014063
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.046854730419077895,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.046854730419077895
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5241379310344828,
"acc_stderr": 0.0416180850350153,
"acc_norm": 0.5241379310344828,
"acc_norm_stderr": 0.0416180850350153
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4497354497354497,
"acc_stderr": 0.02562085704293665,
"acc_norm": 0.4497354497354497,
"acc_norm_stderr": 0.02562085704293665
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7354838709677419,
"acc_stderr": 0.02509189237885928,
"acc_norm": 0.7354838709677419,
"acc_norm_stderr": 0.02509189237885928
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4630541871921182,
"acc_stderr": 0.035083705204426656,
"acc_norm": 0.4630541871921182,
"acc_norm_stderr": 0.035083705204426656
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.63,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7636363636363637,
"acc_stderr": 0.03317505930009181,
"acc_norm": 0.7636363636363637,
"acc_norm_stderr": 0.03317505930009181
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.803030303030303,
"acc_stderr": 0.028335609732463362,
"acc_norm": 0.803030303030303,
"acc_norm_stderr": 0.028335609732463362
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8497409326424871,
"acc_stderr": 0.02578772318072387,
"acc_norm": 0.8497409326424871,
"acc_norm_stderr": 0.02578772318072387
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6,
"acc_stderr": 0.02483881198803316,
"acc_norm": 0.6,
"acc_norm_stderr": 0.02483881198803316
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2740740740740741,
"acc_stderr": 0.027195934804085626,
"acc_norm": 0.2740740740740741,
"acc_norm_stderr": 0.027195934804085626
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.634453781512605,
"acc_stderr": 0.031282177063684614,
"acc_norm": 0.634453781512605,
"acc_norm_stderr": 0.031282177063684614
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3841059602649007,
"acc_stderr": 0.03971301814719197,
"acc_norm": 0.3841059602649007,
"acc_norm_stderr": 0.03971301814719197
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7944954128440367,
"acc_stderr": 0.017324352325016022,
"acc_norm": 0.7944954128440367,
"acc_norm_stderr": 0.017324352325016022
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.0340470532865388
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8088235294117647,
"acc_stderr": 0.02759917430064077,
"acc_norm": 0.8088235294117647,
"acc_norm_stderr": 0.02759917430064077
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7679324894514767,
"acc_stderr": 0.02747974455080851,
"acc_norm": 0.7679324894514767,
"acc_norm_stderr": 0.02747974455080851
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6771300448430493,
"acc_stderr": 0.03138147637575499,
"acc_norm": 0.6771300448430493,
"acc_norm_stderr": 0.03138147637575499
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7099236641221374,
"acc_stderr": 0.03980066246467766,
"acc_norm": 0.7099236641221374,
"acc_norm_stderr": 0.03980066246467766
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8099173553719008,
"acc_stderr": 0.03581796951709282,
"acc_norm": 0.8099173553719008,
"acc_norm_stderr": 0.03581796951709282
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7300613496932515,
"acc_stderr": 0.034878251684978906,
"acc_norm": 0.7300613496932515,
"acc_norm_stderr": 0.034878251684978906
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5178571428571429,
"acc_stderr": 0.047427623612430116,
"acc_norm": 0.5178571428571429,
"acc_norm_stderr": 0.047427623612430116
},
"harness|hendrycksTest-management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.041858325989283136,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.041858325989283136
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8461538461538461,
"acc_stderr": 0.02363687331748928,
"acc_norm": 0.8461538461538461,
"acc_norm_stderr": 0.02363687331748928
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8173690932311622,
"acc_stderr": 0.013816335389973145,
"acc_norm": 0.8173690932311622,
"acc_norm_stderr": 0.013816335389973145
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.02541600377316554,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.02541600377316554
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2346368715083799,
"acc_stderr": 0.014173044098303654,
"acc_norm": 0.2346368715083799,
"acc_norm_stderr": 0.014173044098303654
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6797385620915033,
"acc_stderr": 0.026716118380156847,
"acc_norm": 0.6797385620915033,
"acc_norm_stderr": 0.026716118380156847
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6913183279742765,
"acc_stderr": 0.026236965881153273,
"acc_norm": 0.6913183279742765,
"acc_norm_stderr": 0.026236965881153273
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7129629629629629,
"acc_stderr": 0.02517104191530968,
"acc_norm": 0.7129629629629629,
"acc_norm_stderr": 0.02517104191530968
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4645390070921986,
"acc_stderr": 0.029752389657427047,
"acc_norm": 0.4645390070921986,
"acc_norm_stderr": 0.029752389657427047
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.455019556714472,
"acc_stderr": 0.012718456618701768,
"acc_norm": 0.455019556714472,
"acc_norm_stderr": 0.012718456618701768
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6213235294117647,
"acc_stderr": 0.02946513363977613,
"acc_norm": 0.6213235294117647,
"acc_norm_stderr": 0.02946513363977613
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6339869281045751,
"acc_stderr": 0.019488025745529675,
"acc_norm": 0.6339869281045751,
"acc_norm_stderr": 0.019488025745529675
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6775510204081633,
"acc_stderr": 0.029923100563683903,
"acc_norm": 0.6775510204081633,
"acc_norm_stderr": 0.029923100563683903
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8258706467661692,
"acc_stderr": 0.026814951200421603,
"acc_norm": 0.8258706467661692,
"acc_norm_stderr": 0.026814951200421603
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.83,
"acc_stderr": 0.0377525168068637,
"acc_norm": 0.83,
"acc_norm_stderr": 0.0377525168068637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.46987951807228917,
"acc_stderr": 0.03885425420866766,
"acc_norm": 0.46987951807228917,
"acc_norm_stderr": 0.03885425420866766
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.37209302325581395,
"mc1_stderr": 0.016921090118814035,
"mc2": 0.5263791321103062,
"mc2_stderr": 0.015312628675104242
},
"harness|winogrande|5": {
"acc": 0.7647987371744278,
"acc_stderr": 0.01192000816365087
},
"harness|drop|3": {
"em": 0.3953439597315436,
"em_stderr": 0.005007043944789993,
"f1": 0.46059983221476697,
"f1_stderr": 0.00481810685968407
},
"harness|gsm8k|5": {
"acc": 0.21152388172858225,
"acc_stderr": 0.01124906096863505
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] | [
-0.6848623156547546,
-0.8124610781669617,
0.2705233097076416,
0.17988257110118866,
-0.17542101442813873,
-0.08759750425815582,
0.026306454092264175,
-0.2457270622253418,
0.5572972893714905,
-0.05448390543460846,
-0.4944356679916382,
-0.7057128548622131,
-0.4385777413845062,
0.2626325190067... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
NIKMIKE/SNOWY | NIKMIKE | 2023-11-18T23:09:07Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T23:09:07Z | 2023-11-18T23:07:46.000Z | 2023-11-18T23:07:46 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
NIKMIKE/Neymar_COD | NIKMIKE | 2023-11-18T23:59:44Z | 0 | 0 | null | [
"region:us"
] | 2023-11-18T23:59:44Z | 2023-11-18T23:59:04.000Z | 2023-11-18T23:59:04 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
DBQ/Farfetch.Product.prices.United.Arab.Emirates | DBQ | 2023-11-19T00:16:15Z | 0 | 0 | null | [
"task_categories:text-classification",
"task_categories:image-classification",
"task_categories:feature-extraction",
"task_categories:image-segmentation",
"task_categories:image-to-image",
"task_categories:image-to-text",
"task_categories:object-detection",
"task_categories:summarization",
"task_cat... | 2023-11-19T00:16:15Z | 2023-11-19T00:16:15.000Z | 2023-11-19T00:16:15 | ---
annotations_creators:
- other
language:
- en
language_creators:
- other
license:
- unknown
multilinguality:
- monolingual
pretty_name: 'United Arab Emirates - Farfetch - Product-level price list'
source_datasets:
- original
tags:
- webscraping
- ecommerce
- 'Farfetch'
- fashion
- fashion product
- image
- fashion image
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
---
# Farfetch web scraped data
## About the website
Farfetch operates within the **Ecommerce** industry in the EMEA region, specifically in the **United Arab Emirates**. This industry has grown exponentially in recent years, driven by an increasing number of internet users, accessibility to high-speed internet, and consumer convenience. In the UAE, the Ecommerce industry is especially dynamic and forward-thinking, with continuous technological advancements and an investment-friendly environment propelling its growth. The dataset observed includes **Ecommerce product-list page (PLP) data on Farfetch** in the UAE, providing valuable insights into the industry trends and consumer behaviour in this specific region.
## Link to **dataset**
[United Arab Emirates - Farfetch - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Farfetch%20Product-prices%20United%20Arab%20Emirates/r/recltOpHnVuiZZmML)
| [
-0.7433668971061707,
-0.9743261337280273,
0.21774592995643616,
0.47236132621765137,
-0.19173429906368256,
0.21762630343437195,
0.42214399576187134,
-0.6516929864883423,
0.34533455967903137,
0.5693144202232361,
-0.9769070744514465,
-0.9311628937721252,
0.10056714713573456,
0.125845283269882... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
aisyahhrazak/crawl-medmalay | aisyahhrazak | 2023-11-19T10:32:48Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T10:32:48Z | 2023-11-19T00:27:57.000Z | 2023-11-19T00:27:57 | Entry not found | [
-0.32276487350463867,
-0.22568444907665253,
0.8622263073921204,
0.43461570143699646,
-0.5282988548278809,
0.7012969255447388,
0.7915717363357544,
0.07618642598390579,
0.7746027112007141,
0.25632190704345703,
-0.7852815389633179,
-0.22573848068714142,
-0.910447895526886,
0.5715675354003906,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
aisyahhrazak/crawl-alhijrahnews | aisyahhrazak | 2023-11-19T00:28:43Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T00:28:43Z | 2023-11-19T00:28:43.000Z | 2023-11-19T00:28:43 | Entry not found | [
-0.32276487350463867,
-0.22568444907665253,
0.8622263073921204,
0.43461570143699646,
-0.5282988548278809,
0.7012969255447388,
0.7915717363357544,
0.07618642598390579,
0.7746027112007141,
0.25632190704345703,
-0.7852815389633179,
-0.22573848068714142,
-0.910447895526886,
0.5715675354003906,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
aisyahhrazak/crawl-malaysian-website | aisyahhrazak | 2023-11-19T14:46:01Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T14:46:01Z | 2023-11-19T00:29:12.000Z | 2023-11-19T00:29:12 | Entry not found | [
-0.32276487350463867,
-0.22568444907665253,
0.8622263073921204,
0.43461570143699646,
-0.5282988548278809,
0.7012969255447388,
0.7915717363357544,
0.07618642598390579,
0.7746027112007141,
0.25632190704345703,
-0.7852815389633179,
-0.22573848068714142,
-0.910447895526886,
0.5715675354003906,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
furry-br/angel-dust | furry-br | 2023-11-19T00:31:03Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-19T00:31:03Z | 2023-11-19T00:30:47.000Z | 2023-11-19T00:30:47 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Breniinho/matheuszinho | Breniinho | 2023-11-19T00:56:27Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T00:56:27Z | 2023-11-19T00:41:58.000Z | 2023-11-19T00:41:58 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
dtsvankin/AdvicetoImprove | dtsvankin | 2023-11-19T00:44:05Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-19T00:44:05Z | 2023-11-19T00:44:05.000Z | 2023-11-19T00:44:05 | ---
license: mit
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gyrojeff/YuzuMarker.FontDetection | gyrojeff | 2023-11-19T02:51:10Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-19T02:51:10Z | 2023-11-19T00:52:04.000Z | 2023-11-19T00:52:04 | ---
license: mit
---
# YuzuMarker.FontDetection
- Code: https://github.com/JeffersonQin/YuzuMarker.FontDetection
- Space: https://huggingface.co/spaces/gyrojeff/YuzuMarker.FontDetection
- Dataset: https://huggingface.co/datasets/gyrojeff/YuzuMarker.FontDetection/tree/master
The generated dataset is now available. Data are on the `master` branch due to initial commit error.
To use the data, each `.tar` package contains a `train`, `val`, `test` split. Move them to `./dataset`, then untar them.
To train the model, provide the path(s) of the untarred folders to the training script. For detail, please check the code repository.
| [
-0.25240761041641235,
-0.5650757551193237,
0.09118135273456573,
0.19188983738422394,
-0.48394644260406494,
-0.14345838129520416,
0.009473863057792187,
-0.3173615336418152,
0.3435259759426117,
0.44974324107170105,
-0.8244501948356628,
-0.362030029296875,
-0.46066761016845703,
-0.12703247368... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
justmalhar/hindi-short-news | justmalhar | 2023-11-19T01:25:50Z | 0 | 0 | null | [
"license:unlicense",
"region:us"
] | 2023-11-19T01:25:50Z | 2023-11-19T01:05:21.000Z | 2023-11-19T01:05:21 | ---
license: unlicense
dataset_info:
features:
- name: prompt
dtype: string
- name: completion
dtype: string
splits:
- name: train
num_bytes: 161184365
num_examples: 124235
download_size: 52742089
dataset_size: 161184365
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.12853367626667023,
-0.18616794049739838,
0.6529126763343811,
0.4943627417087555,
-0.19319313764572144,
0.23607443273067474,
0.36071979999542236,
0.05056338757276535,
0.5793654322624207,
0.7400138974189758,
-0.6508103013038635,
-0.23783987760543823,
-0.710224986076355,
-0.047825977206230... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
moshe1234/LukeCombs | moshe1234 | 2023-11-19T01:07:05Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T01:07:05Z | 2023-11-19T01:07:05.000Z | 2023-11-19T01:07:05 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
yoki123/onesparse-nq | yoki123 | 2023-11-19T01:55:11Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T01:55:11Z | 2023-11-19T01:15:01.000Z | 2023-11-19T01:15:01 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Arutairu/alice | Arutairu | 2023-11-19T01:16:28Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T01:16:28Z | 2023-11-19T01:15:15.000Z | 2023-11-19T01:15:15 | Entry not found | [
-0.3227649927139282,
-0.225684255361557,
0.862226128578186,
0.43461498618125916,
-0.5282987952232361,
0.7012963891029358,
0.7915717363357544,
0.07618629932403564,
0.7746025919914246,
0.2563219666481018,
-0.7852816581726074,
-0.2257382869720459,
-0.9104480743408203,
0.5715669393539429,
-0... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
ALIGAMEZ/spongebobindo | ALIGAMEZ | 2023-11-19T01:58:30Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-19T01:58:30Z | 2023-11-19T01:49:00.000Z | 2023-11-19T01:49:00 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Raspberry-ai/mango8AT60TjozU_raw_v1 | Raspberry-ai | 2023-11-19T02:08:14Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T02:08:14Z | 2023-11-19T02:03:18.000Z | 2023-11-19T02:03:18 | ---
dataset_info:
features:
- name: url
dtype: string
- name: name
dtype: string
- name: color
dtype: string
- name: image_path
dtype: string
- name: brand_name
dtype: string
- name: overall_description
dtype: string
- name: style_description
dtype: string
- name: material_description
dtype: string
- name: product_category
dtype: string
- name: product_group_category
dtype: string
- name: section_name
dtype: string
- name: created_at
dtype: string
- name: image_urls
dtype: string
- name: images
dtype: string
- name: images_path
sequence: string
- name: source
dtype: string
- name: caption
dtype: string
- name: s3_uris
sequence: string
splits:
- name: train
num_bytes: 15190493
num_examples: 3212
download_size: 5398086
dataset_size: 15190493
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "mango_raw_dataset_v1"
3212 items
1. Women clothing
2. Women shoes
Ahe image resolution ranges ranges between 500*600, 600*800, 900*1300.
The resolution of front view images is on lower end.
Average images per item: 7
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | [
-0.769378125667572,
-0.23458078503608704,
-0.09746982157230377,
0.21788854897022247,
-0.802486002445221,
-0.21901118755340576,
0.12346227467060089,
-0.6413142085075378,
0.4536875784397125,
0.3366646468639374,
-0.7638823986053467,
-0.7378595471382141,
-0.6116246581077576,
-0.012946233153343... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gilsonk12/gilson4 | gilsonk12 | 2023-11-19T02:08:50Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-19T02:08:50Z | 2023-11-19T02:07:53.000Z | 2023-11-19T02:07:53 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gunjan12/speech-to-text2 | gunjan12 | 2023-11-19T02:10:02Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T02:10:02Z | 2023-11-19T02:10:02.000Z | 2023-11-19T02:10:02 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Rongchen/test_mft | Rongchen | 2023-11-27T17:49:12Z | 0 | 0 | null | [
"region:us"
] | 2023-11-27T17:49:12Z | 2023-11-19T02:22:23.000Z | 2023-11-19T02:22:23 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
NoobPROBR/Kanedy | NoobPROBR | 2023-11-19T02:42:02Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-19T02:42:02Z | 2023-11-19T02:39:54.000Z | 2023-11-19T02:39:54 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
ewwerpm/Chinese_word | ewwerpm | 2023-11-19T02:53:14Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T02:53:14Z | 2023-11-19T02:51:13.000Z | 2023-11-19T02:51:13 | 根据 https://github.com/ml-distribution/chinese-corpus/tree/master/emotion-dic/zhiwang 整理的 词表(一个文件),但似乎用在分词方面效果不太好。
分词代码可以参考:https://cloud.tencent.com/developer/article/2003172?pos=comment&pos=comment | [
-0.5215898752212524,
-0.6801155209541321,
0.20388163626194,
0.7440708875656128,
-0.7754079103469849,
-0.19788187742233276,
-0.2895633280277252,
-0.4178072214126587,
0.9080950617790222,
-0.09690682590007782,
-0.6237146258354187,
-0.9482772350311279,
-0.6331013441085815,
0.06839290261268616,... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
gilsonk12/gilson5 | gilsonk12 | 2023-11-19T02:58:08Z | 0 | 0 | null | [
"license:openrail",
"region:us"
] | 2023-11-19T02:58:08Z | 2023-11-19T02:56:53.000Z | 2023-11-19T02:56:53 | ---
license: openrail
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
varox34/med-3 | varox34 | 2023-11-19T03:52:36Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T03:52:36Z | 2023-11-19T03:51:58.000Z | 2023-11-19T03:51:58 | Entry not found | [
-0.32276472449302673,
-0.22568407654762268,
0.8622258901596069,
0.4346148371696472,
-0.5282984972000122,
0.7012965679168701,
0.7915717363357544,
0.07618629932403564,
0.7746022939682007,
0.2563222646713257,
-0.785281777381897,
-0.22573848068714142,
-0.9104482531547546,
0.5715669393539429,
... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
felipeoes/long_answers_qa_blue_amazon_legislation_65k | felipeoes | 2023-11-19T04:43:58Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T04:43:58Z | 2023-11-19T04:43:44.000Z | 2023-11-19T04:43:44 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: file_name
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: new_questions
dtype: string
- name: new_long_answers
dtype: string
- name: url
sequence: 'null'
- name: __index_level_0__
dtype: float64
splits:
- name: train
num_bytes: 79121969.68039252
num_examples: 51931
- name: test
num_bytes: 9891198.458822442
num_examples: 6492
- name: validation
num_bytes: 9889674.86078504
num_examples: 6491
download_size: 51922822
dataset_size: 98902843.0
---
# Dataset Card for "long_answers_qa_blue_amazon_legislation_65k"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | [
-0.3656197488307953,
-0.2658000588417053,
0.17729587852954865,
0.3393447697162628,
-0.5349293947219849,
-0.08533000946044922,
0.24476636946201324,
-0.2849193513393402,
0.5926585793495178,
0.7943416237831116,
-0.7539627552032471,
-0.7679493427276611,
-0.1682989001274109,
-0.2160568088293075... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
nizaro/test | nizaro | 2023-11-20T17:21:20Z | 0 | 0 | null | [
"license:cc-by-nc-nd-4.0",
"region:us"
] | 2023-11-20T17:21:20Z | 2023-11-19T04:44:43.000Z | 2023-11-19T04:44:43 | ---
license: cc-by-nc-nd-4.0
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
Yunij/dataset | Yunij | 2023-11-19T05:03:41Z | 0 | 0 | null | [
"license:apache-2.0",
"region:us"
] | 2023-11-19T05:03:41Z | 2023-11-19T05:01:34.000Z | 2023-11-19T05:01:34 | ---
license: apache-2.0
dataset_info:
features:
- name: answers
sequence: string
- name: passages
struct:
- name: is_selected
sequence: int32
- name: passage_text
sequence: string
- name: url
sequence: string
- name: query
dtype: string
- name: query_id
dtype: int32
- name: query_type
dtype: string
- name: wellFormedAnswers
sequence: 'null'
- name: ai_answers
dtype: string
splits:
- name: train
num_bytes: 85230946
num_examples: 20000
download_size: 42598247
dataset_size: 85230946
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
| [
-0.12853392958641052,
-0.18616779148578644,
0.6529127955436707,
0.49436280131340027,
-0.19319361448287964,
0.23607419431209564,
0.36072003841400146,
0.050563063472509384,
0.579365611076355,
0.7400140762329102,
-0.6508104205131531,
-0.23783954977989197,
-0.7102249264717102,
-0.0478260256350... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
imseldrith/mydataset | imseldrith | 2023-11-19T06:09:34Z | 0 | 0 | null | [
"license:mit",
"region:us"
] | 2023-11-19T06:09:34Z | 2023-11-19T06:06:38.000Z | 2023-11-19T06:06:38 | ---
license: mit
---
| [
-0.1285335123538971,
-0.1861683875322342,
0.6529128551483154,
0.49436232447624207,
-0.19319400191307068,
0.23607441782951355,
0.36072009801864624,
0.05056373029947281,
0.5793656706809998,
0.7400146722793579,
-0.650810182094574,
-0.23784008622169495,
-0.7102247476577759,
-0.0478255338966846... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
CCCCCC/BPO | CCCCCC | 2023-11-20T05:42:13Z | 0 | 1 | null | [
"task_categories:text-generation",
"size_categories:10K<n<100K",
"language:en",
"license:apache-2.0",
"human_feedback",
"arxiv:2311.04155",
"region:us"
] | 2023-11-20T05:42:13Z | 2023-11-19T06:20:37.000Z | 2023-11-19T06:20:37 | ---
license: apache-2.0
task_categories:
- text-generation
language:
- en
tags:
- human_feedback
size_categories:
- 10K<n<100K
pretty_name: BPO
---
# Dataset Card for Black-box Prompt Optimization (BPO)
## Dataset Description
- **Repository:** https://github.com/thu-coai/BPO
- **Paper:** https://arxiv.org/abs/2311.04155
- **Model:**
### Data Summary
To advance the development of alignment in language models, we introduce a black-box alignment method. BPO enhances the alignment of various Large Language Models (LLMs) with human preferences using only a plug-and-play model. To further promote alignment work from the prompting perspective, we are releasing the BPO Dataset. This dataset comprises 14,395 entries of prompt optimization pairs, constructed using open-source feedback data with OpenAI's `gpt-3.5-turbo` engine. We have thoroughly ensured the quality and diversity of the data through careful filtering and correction.
Please refer to our [paper](https://arxiv.org/abs/2311.04155) for further details.
### Supported Tasks
The BPO Dataset is designed for Prompt Optimization / Engineering task. As we have carefully flitered and curated the dataset, it is also a good choice for SFT or RLHF.
### Language
The data in BPO are in English.
## Data Structure
### Data Instances
An example of "train" looks as follows:
```json
{
"prompt": "Minimize the following data set\n[3, 4, 6, 7, 8, 10]",
"optimized_prompt": "Provide a step-by-step solution to minimize the following data set: [3, 4, 6, 7, 8, 10].",
"good_res": "To minimize the values in the given data set, which is a list of numbers, you could sort them in ascending order as follows: [3, 4, 6, 7, 8, 10]. The minimized (least) value in this data set would be 3.",
"bad_res": "[3, 4, 6, 7, 8, 10]"
}
```
### Data Fields
The data fields are as follows:
* `prompt`: the original user input
* `optimized_prompt`: the optimized prompt based on feedback
* `good_res`: human-chosen (preferred) response
* `bad_res`: human-rejected response
### Data Splits
| | train | valid | test |
|---------------|------:|------:|------:|
| BPO | 13895 | 300 | 200 |
## Additional Information
### Source Data
Our feedback data come from the following open-sourced datasets:
- [OASST1](https://huggingface.co/datasets/OpenAssistant/oasst1)
- [hh-rlhf](https://huggingface.co/datasets/Anthropic/hh-rlhf)
- [Alpaca-GPT4](https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM/blob/main/data/comparison_data_v2.json)
- [Chatbot Arena Conversation](https://huggingface.co/datasets/lmsys/chatbot_arena_conversations)
### Other Known Limitations
- Feedback Data Quality: Due to our use of open-source feedback data, some human preferences included may not be entirely accurate.
- Task Diversity: Despite our efforts to filter and achieve a diverse dataset, these open-source datasets are clearly not sufficient to cover the wide variety of user queries.
- Optimized Prompts: The optimized prompts are auto-generated by `gpt-3.5-turbo` based on feedback data. Even though we have manually reviewed and modified the dataset, we cannot guarantee that all prompt optimizations are correct.
### Citation Information
```
@article{cheng2023black,
title={Black-Box Prompt Optimization: Aligning Large Language Models without Model Training},
author={Cheng, Jiale and Liu, Xiao and Zheng, Kehan and Ke, Pei and Wang, Hongning and Dong, Yuxiao and Tang, Jie and Huang, Minlie},
journal={arXiv preprint arXiv:2311.04155},
year={2023}
}
``` | [
-0.4482549726963043,
-0.8218920230865479,
0.38871148228645325,
0.24881426990032196,
-0.16463564336299896,
-0.154157355427742,
-0.30563029646873474,
-0.2606401741504669,
-0.14054790139198303,
0.2849639654159546,
-1.034820318222046,
-0.6085588932037354,
-0.13851796090602875,
0.35266014933586... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
hajar817/common_voice_13_0_hi_pseudo_labelled | hajar817 | 2023-11-19T06:31:42Z | 0 | 0 | null | [
"region:us"
] | 2023-11-19T06:31:42Z | 2023-11-19T06:31:42.000Z | 2023-11-19T06:31:42 | Entry not found | [
-0.3227645754814148,
-0.22568479180335999,
0.8622263669967651,
0.43461522459983826,
-0.52829909324646,
0.7012971639633179,
0.7915719747543335,
0.07618614286184311,
0.774603009223938,
0.2563217282295227,
-0.7852813005447388,
-0.22573819756507874,
-0.9104475975036621,
0.5715674161911011,
-... | null | null | null | null | null | null | null | null | null | null | null | null | null | |
PaulLoisel/mlp_splitted | PaulLoisel | 2023-11-20T08:36:47Z | 0 | 0 | null | [
"region:us"
] | 2023-11-20T08:36:47Z | 2023-11-19T06:59:33.000Z | 2023-11-19T06:59:33 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: val
path: data/val-*
dataset_info:
features:
- name: purchased_products
dtype: float64
- name: review_time_spent
dtype: float64
- name: label
dtype: int64
- name: text
dtype: string
- name: Beauty Products
dtype: float64
- name: Furniture
dtype: float64
- name: Toys
dtype: float64
splits:
- name: train
num_bytes: 636.6
num_examples: 3
- name: test
num_bytes: 212.2
num_examples: 1
- name: val
num_bytes: 212.2
num_examples: 1
download_size: 15368
dataset_size: 1061.0
---
# Dataset Card for "mlp_splitted"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) | [
-0.6441695690155029,
-0.30997607111930847,
0.05793685466051102,
0.4224841296672821,
-0.19815045595169067,
0.042684510350227356,
0.2390475571155548,
-0.2079963982105255,
0.8883045315742493,
0.5699364542961121,
-0.8985303640365601,
-0.56309974193573,
-0.6105470061302185,
-0.2661474049091339,... | null | null | null | null | null | null | null | null | null | null | null | null | null |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.