datasetId stringlengths 2 117 | card stringlengths 19 1.01M |
|---|---|
Anssi/europarl_dbca_splits | ---
configs:
- config_name: comdiv0.0_en_fr
data_files:
- split: train
path: "comdiv0.0_en_fr/train.jsonl"
- split: test
path: "comdiv0.0_en_fr/test.jsonl"
- config_name: comdiv0.0_en_de
data_files:
- split: train
path: "comdiv0.0_en_de/train.jsonl"
- split: test
path: "comdiv0.0_en_de/test.jsonl"
- config_name: comdiv0.0_en_fi
data_files:
- split: train
path: "comdiv0.0_en_fi/train.jsonl"
- split: test
path: "comdiv0.0_en_fi/test.jsonl"
- config_name: comdiv0.0_en_el
data_files:
- split: train
path: "comdiv0.0_en_el/train.jsonl"
- split: test
path: "comdiv0.0_en_el/test.jsonl"
- config_name: comdiv1.0_en_fr
data_files:
- split: train
path: "comdiv1.0_en_fr/train.jsonl"
- split: test
path: "comdiv1.0_en_fr/test.jsonl"
- config_name: comdiv1.0_en_de
data_files:
- split: train
path: "comdiv1.0_en_de/train.jsonl"
- split: test
path: "comdiv1.0_en_de/test.jsonl"
- config_name: comdiv1.0_en_fi
data_files:
- split: train
path: "comdiv1.0_en_fi/train.jsonl"
- split: test
path: "comdiv1.0_en_fi/test.jsonl"
- config_name: comdiv1.0_en_el
data_files:
- split: train
path: "comdiv1.0_en_el/train.jsonl"
- split: test
path: "comdiv1.0_en_el/test.jsonl"
---
|
lsimon/github-issues | ---
dataset_info:
features:
- name: url
dtype: string
- name: repository_url
dtype: string
- name: labels_url
dtype: string
- name: comments_url
dtype: string
- name: events_url
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: user
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: labels
list:
- name: id
dtype: int64
- name: node_id
dtype: string
- name: url
dtype: string
- name: name
dtype: string
- name: color
dtype: string
- name: default
dtype: bool
- name: description
dtype: string
- name: state
dtype: string
- name: locked
dtype: bool
- name: assignee
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: assignees
list:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: milestone
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: labels_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: description
dtype: string
- name: creator
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: open_issues
dtype: int64
- name: closed_issues
dtype: int64
- name: state
dtype: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: due_on
dtype: 'null'
- name: closed_at
dtype: 'null'
- name: comments
sequence: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: closed_at
dtype: timestamp[s]
- name: author_association
dtype: string
- name: active_lock_reason
dtype: 'null'
- name: draft
dtype: bool
- name: pull_request
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: diff_url
dtype: string
- name: patch_url
dtype: string
- name: merged_at
dtype: timestamp[s]
- name: body
dtype: string
- name: reactions
struct:
- name: url
dtype: string
- name: total_count
dtype: int64
- name: '+1'
dtype: int64
- name: '-1'
dtype: int64
- name: laugh
dtype: int64
- name: hooray
dtype: int64
- name: confused
dtype: int64
- name: heart
dtype: int64
- name: rocket
dtype: int64
- name: eyes
dtype: int64
- name: timeline_url
dtype: string
- name: performed_via_github_app
dtype: 'null'
- name: state_reason
dtype: string
- name: is_pull_request
dtype: bool
splits:
- name: train
num_bytes: 5004849
num_examples: 500
download_size: 1349309
dataset_size: 5004849
---
# Dataset Card for "github-issues"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
nlplabtdtu/OpenOrca-processes-QA-vi | ---
dataset_info:
features:
- name: id
dtype: string
- name: system_prompt
dtype: string
- name: question
dtype: string
- name: response
dtype: string
splits:
- name: train
num_bytes: 36733040
num_examples: 32860
download_size: 12971853
dataset_size: 36733040
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "OpenOrca-processes-QA-vi"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kartikagg98/HINMIX_hi-en | ---
license: apache-2.0
dataset_info:
- config_name: lcsalign-en
features:
- name: text
dtype: string
splits:
- name: test
num_bytes: 305023
num_examples: 2507
- name: train
num_bytes: 455104487
num_examples: 4200000
- name: valid
num_bytes: 21217
num_examples: 280
download_size: 318440274
dataset_size: 455430727
- config_name: lcsalign-hi
features:
- name: text
dtype: string
splits:
- name: test
num_bytes: 770118
num_examples: 2507
- name: train
num_bytes: 1084853757
num_examples: 4200000
- name: valid
num_bytes: 45670
num_examples: 280
download_size: 470820787
dataset_size: 1085669545
- config_name: lcsalign-hicm
features:
- name: text
dtype: string
splits:
- name: test
num_bytes: 561442
num_examples: 2507
- name: train
num_bytes: 872213032
num_examples: 4200000
- name: valid
num_bytes: 34530
num_examples: 280
download_size: 455501891
dataset_size: 872809004
- config_name: lcsalign-hicmdvg
features:
- name: text
dtype: string
splits:
- name: test
num_bytes: 798126
num_examples: 2507
- name: train
num_bytes: 1104443176
num_examples: 4200000
- name: valid
num_bytes: 47513
num_examples: 280
download_size: 491775164
dataset_size: 1105288815
- config_name: lcsalign-hicmrom
features:
- name: text
dtype: string
splits:
- name: test
num_bytes: 338176
num_examples: 2507
- name: train
num_bytes: 467370942
num_examples: 4200000
- name: valid
num_bytes: 20431
num_examples: 280
download_size: 337385029
dataset_size: 467729549
- config_name: lcsalign-noisyhicmrom
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 462418855
num_examples: 4200000
- name: test
num_bytes: 334401
num_examples: 2507
- name: valid
num_bytes: 20246
num_examples: 280
download_size: 379419827
dataset_size: 462773502
configs:
- config_name: lcsalign-en
data_files:
- split: test
path: lcsalign-en/test-*
- split: train
path: lcsalign-en/train-*
- split: valid
path: lcsalign-en/valid-*
- config_name: lcsalign-hi
data_files:
- split: test
path: lcsalign-hi/test-*
- split: train
path: lcsalign-hi/train-*
- split: valid
path: lcsalign-hi/valid-*
- config_name: lcsalign-hicm
data_files:
- split: test
path: lcsalign-hicm/test-*
- split: train
path: lcsalign-hicm/train-*
- split: valid
path: lcsalign-hicm/valid-*
- config_name: lcsalign-hicmdvg
data_files:
- split: test
path: lcsalign-hicmdvg/test-*
- split: train
path: lcsalign-hicmdvg/train-*
- split: valid
path: lcsalign-hicmdvg/valid-*
- config_name: lcsalign-hicmrom
data_files:
- split: test
path: lcsalign-hicmrom/test-*
- split: train
path: lcsalign-hicmrom/train-*
- split: valid
path: lcsalign-hicmrom/valid-*
- config_name: lcsalign-noisyhicmrom
data_files:
- split: train
path: lcsalign-noisyhicmrom/train-*
- split: test
path: lcsalign-noisyhicmrom/test-*
- split: valid
path: lcsalign-noisyhicmrom/valid-*
task_categories:
- translation
language:
- hi
- en
tags:
- codemix
- indicnlp
- hindi
- english
- multilingual
pretty_name: Hindi-English Codemix Datasets
size_categories:
- 1M<n<10M
--- |
anderloh/5ClassMotorizedLarge | ---
dataset_info:
- config_name: finetune
features:
- name: audio
dtype: audio
- name: label
dtype:
class_label:
names:
'0': Helicopter
'1': Jet
'2': Racecar
'3': Train_
'4': Truck
splits:
- name: train
num_bytes: 448111066.0
num_examples: 2000
- name: validation
num_bytes: 224056125.0
num_examples: 1000
download_size: 672179128
dataset_size: 672167191.0
- config_name: pretraining
features:
- name: audio
dtype: audio
- name: label
dtype:
class_label:
names:
'0': Helicopter
'1': Jet
'2': Racecar
'3': Train_
'4': Truck
splits:
- name: train
num_bytes: 2381396801.811
num_examples: 10629
- name: test
num_bytes: 905396784.689
num_examples: 4041
download_size: 3286168021
dataset_size: 3286793586.5
configs:
- config_name: finetune
data_files:
- split: train
path: finetune/train-*
- split: validation
path: finetune/validation-*
- config_name: pretraining
data_files:
- split: train
path: pretraining/train-*
- split: test
path: pretraining/test-*
---
# Dataset Card for "5ClassMotorizedLarge"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
liuyanchen1015/MULTI_VALUE_rte_more_much | ---
dataset_info:
features:
- name: sentence1
dtype: string
- name: sentence2
dtype: string
- name: label
dtype: int64
- name: idx
dtype: int64
- name: value_score
dtype: int64
splits:
- name: test
num_bytes: 80145
num_examples: 187
- name: train
num_bytes: 84239
num_examples: 189
download_size: 117525
dataset_size: 164384
---
# Dataset Card for "MULTI_VALUE_rte_more_much"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
parambharat/tamil_asr_corpus | ---
annotations_creators:
- found
language:
- ta
language_creators:
- found
license:
- cc-by-4.0
multilinguality:
- monolingual
pretty_name: Tamil ASR Corpus
size_categories:
- 100K<n<1M
source_datasets:
- extended|common_voice
- extended|openslr
tags: []
task_categories:
- automatic-speech-recognition
task_ids: []
---
# Dataset Card for [Dataset Name]
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:**
- **Repository:**
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
[More Information Needed]
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
Thanks to [@parambharat](https://github.com/parambharat) for adding this dataset.
|
datahrvoje/twitter_dataset_1713194546 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 9457
num_examples: 23
download_size: 9806
dataset_size: 9457
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
communityai/Telugu-LLM-Labs___telugu_alpaca_yahma_cleaned_filtered_romanized | ---
dataset_info:
features:
- name: source
dtype: string
- name: conversations
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 85288011.0
num_examples: 28910
download_size: 29059985
dataset_size: 85288011.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
odepraz/rvl_cdip_5percentofdata | ---
license: unknown
---
|
EleutherAI/pile | ---
annotations_creators:
- no-annotation
language_creators:
- found
language:
- en
license: other
multilinguality:
- monolingual
pretty_name: the Pile
size_categories:
- 100B<n<1T
source_datasets:
- original
task_categories:
- text-generation
- fill-mask
task_ids:
- language-modeling
- masked-language-modeling
paperswithcode_id: the-pile
---
# Dataset Card for The Pile
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
This model card is a work in progress. Please also see [our datasheet](https://arxiv.org/abs/2201.07311) for more detailed info.
## Dataset Description
- **Homepage:** https://pile.eleuther.ai/
- **Repository:** https://github.com/EleutherAI/the-pile
- **Paper:** [The Pile: An 800GB Dataset of Diverse Text for Language Modeling](https://arxiv.org/abs/2101.00027)
- **Leaderboard:**
- **Point of Contact:** [EleutherAI](mailto:contact@eleuther.ai)
- **Datasheet:** [Datasheet for the Pile](https://arxiv.org/abs/2201.07311)
### Dataset Summary
The Pile is a 825 GiB diverse, open source language modelling data set that consists of 22 smaller, high-quality
datasets combined together.
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
This dataset is in English (`EN`).
## Dataset Structure
### Data Instances
#### all
```
{
'meta': {'pile_set_name': 'Pile-CC'},
'text': 'It is done, and submitted. You can play “Survival of the Tastiest” on Android, and on the web. Playing on...'
}
```
<details>
<summary>Expand to see individual components</summary>
#### enron_emails
```
{
'text': 'Name\t\t\tNew Title\t\t\t\tEffective Date\t\t\tMid Year promotion Yes/No\n\nFloyd, Jodie\t\tSr Cust Svc Rep (no change)\t\t7/16/01\t\t\t\tNo\n\nBuehler, Craig\t\tSr Mkt/Sup Analyst (no change)\t\t7/16/01\t\t\t\tNo\n\nWagoner, Mike\t\tTeam Advisor - Gas Control\t\t7/1/01\t\t\t\tNo\n\nClapper, Karen\t\tSr Cust Svc Rep\t\t\t8/1/01\t\t\t\tYes\n\nGreaney, Chris\t\tSr Cust Svc Rep\t\t\t8/1/01\t\t\t\tYes\n\nWilkens, Jerry\t\tSr Cust Svc Rep\t\t\t8/1/01\t\t\t\tYes\n\nMinton, Kevin\t\tPipeline Controller\t\t\t8/1/01\t\t\t\tYes\n\nCox, Don\t\tPipeline Controller\t\t\t8/1/01\t\t\t\tYes\n\nHanagriff, Richard\tSr Accounting Control Spec\t\t8/1/01\t\t\t\tYes\n\n\nThanks,\nMS'
'meta': "{}",
}
```
#### europarl
```
{
'text': 'Uvádění biocidních přípravků na trh - Nový návrh revize týkající se biocidních přípravků (rozprava) \nPředsedající\nDalším bodem je společná rozprava o následujících tématech:\nzpráva paní Sârbuové za Výbor pro životní prostředí, veřejné zdraví a bezpečnost potravin o návrhu...'
'meta': "{'language': 'cs'}",
}
```
#### free_law
```
{
'meta': "{'case_jurisdiction': 'scotus.tar.gz', 'case_ID': '110921.json','date_created': '2010-04-28T17:12:49Z'}",
'text': '\n461 U.S. 238 (1983)\nOLIM ET AL.\nv.\nWAKINEKONA\nNo. 81-1581.\nSupreme Court of United States.\nArgued...'
}
```
#### hacker_news
```
{
'text': "\nChina Deserves Donald Trump - rm2889\nhttps://www.nytimes.com/2019/05/21/opinion/china-trump-trade.html\n======\nNotPaidToPost\n> so he’d be wise to curb his nationalistic “no-one-tells-China-what-to-do”\n> bluster\n\nThis comment highlights both ignorance of Chinese history and continuing\nAmerican arrogance.\n\nChina has been painfully dictated what to do during the last 200 years. This\nhas had a profound effect on the country and has led to the collapse of\nimperial rule and the drive to 'rejuvenate'...",
'meta': "{'id': '19979654'}",
}
```
#### nih_exporter
```
{
'text': "The National Domestic Violence Hotline (NDVH) and the National Dating Abuse Helpline (NDAH), which are supported by the Division of Family Violence Prevention and Services within the Family and Youth Services Bureau, serve as critical partners in the intervention, prevention, and resource assistance efforts of the network of family violence, domestic violence, and dating violence service providers. They provide crisis intervention and support services; information about resources on domestic...",
'meta': " {'APPLICATION_ID': 100065}",
}
```
#### pubmed
```
{
'meta': {'pmid': 11409574, 'language': 'eng'},
'text': 'Epidemiology of hypoxaemia in children with acute lower respiratory infection.\nTo determine the prevalence of hypoxaemia in children aged under 5 years suffering acute lower respiratory infections (ALRI), the risk factors for hypoxaemia in children under 5 years of age with ALRI, and the association of hypoxaemia with an increased risk of dying in children of the same age. Systematic review of the published literature. Out-patient clinics, emergency departments and hospitalisation wards in 23 health centres from 10 countries. Cohort studies reporting the frequency of hypoxaemia in children under 5 years of age with ALRI, and the association between hypoxaemia and the risk of dying. Prevalence of hypoxaemia measured in children with ARI and relative risks for the association between the severity of illness and the frequency of hypoxaemia, and between hypoxaemia and the risk of dying. Seventeen published studies were found that included 4,021 children under 5 with acute respiratory infections (ARI) and reported the prevalence of hypoxaemia. Out-patient children and those with a clinical diagnosis of upper ARI had a low risk of hypoxaemia (pooled estimate of 6% to 9%). The prevalence increased to 31% and to 43% in patients in emergency departments and in cases with clinical pneumonia, respectively, and it was even higher among hospitalised children (47%) and in those with radiographically confirmed pneumonia (72%). The cumulated data also suggest that hypoxaemia is more frequent in children living at high altitude. Three papers reported an association between hypoxaemia and death, with relative risks varying between 1.4 and 4.6. Papers describing predictors of hypoxaemia have focused on clinical signs for detecting hypoxaemia rather than on identifying risk factors for developing this complication. 
Hypoxaemia is a common and potentially lethal complication of ALRI in children under 5, particularly among those with severe disease and those living at high altitude. Given the observed high prevalence of hypoxaemia and its likely association with increased mortality, efforts should be made to improve the detection of hypoxaemia and to provide oxygen earlier to more children with severe ALRI.'
}
```
#### pubmed_central
```
{
'meta': "{id': 'PMC5595690'}",
'text': 'Introduction {#acel12642-sec-0001}\n============\n\nAlzheimer\\\'s disease (AD), the most common cause of...'
}
```
#### ubuntu_irc
```
{
'text': "#ubuntu 2004-07-05\n* Window 3\n* \tServer: [0] <None>\n* \tScreen: 0x817e90c\n* \tGeometry Info: [0 11 0 11 11 11] \n* \tCO, LI are [94 49] \n* \tCurrent channel: #ubuntu\n* \tQuery User: <None> \n*\tPrompt: <None>\n* \tSecond status line is OFF\n* \tSplit line is ON triple is OFF\n* \tLogging is ON\n* \tLogfile is irclogs/ubuntu.log\n* \tNotification is OFF\n* \tHold mode is OFF\n* \tWindow level is NONE\n* \tLastlog level is ALL\n* \tNotify level is ALL\n<mdz> lifeless: using tla effectively for all packages in Warty requ...",
'meta': "{'channel': 'ubuntu', 'month': 7}"
}
```
#### uspto
```
{
'text': "1. Field of the Invention\nIn an extensive plant breeding program, Grant Merrill, originator and now deceased, originated a large number of new and distinct varieties of fruit trees, and which included the herein-claimed variety of peach tree. Such plant breeding program was undertaken in originator's experimental orchard located near Exeter, Tulare County, Calif.\n2. Prior Varieties\nAmong the existent varieties of peach trees which were known to originator, particular reference is made to Gemfree (U.S. Plant Pat. No. 1,409) and June Lady (U.S. Plant Pat. No. 3,022) hereinafter mentioned for the purpose of comparison.",
'meta': "{'bibliographic_information': {'Patent Number': 'PP0049700', 'Series Code': '6', 'Application Number': '2845415', 'Application Type': '6', 'Art unit': '337', 'Application Filing Date': '19810720', 'Title of Invention': 'Peach tree (A3-10)', 'Issue Date': '19830104', 'Number of Claims': '1', 'Exemplary Claim Number(s)': '1', 'Primary Examiner': 'Bagwill; Robert E.', 'Number of Drawing Sheets': '1', 'Number of figures': '1'}, 'source_file': 'https://bulkdata.uspto.gov/data/patent/grant/redbook/fulltext/1983/pftaps19830104_wk01.zip', 'abstract': 'A peach tree which is large, vigorous, and spreading; foliated with large, lanceolate leaves having a finely serrate margin, a petiole of medium length and thickness, and medium size, reniform glands; blooms from medium size, conic, plump, pubescent buds; the flowers, medium in blooming period compared with other varieties, being of medium size, and pink; and is a regular and very productive bearer of medium but variable size, round truncate, clingstone fruit having yellow skin substantially overspread with red, yellow flesh mottled with red adjacent the skin, and an amber stone.', 'classifications': [{'OCL': ['Plt', '43'], 'EDF': ['3'], 'ICL': ['A01H', '503'], 'FSC': ['Plt'], 'FSS': ['43']}], 'inventors': [{'inventor name': 'Merrill, deceased; Grant', 'Street': '325 Breese Ave.', 'City': 'late of Red Bluff', 'State': 'CA'}, {'inventor name': 'Merrill, executrix; by Lucile B.', 'Street': '325 Breese Ave.', 'City': 'Red Bluff', 'State': 'CA', 'Zip code': '96080'}]}"
}
```
#### github
```
{
'text': "/* filesystem.c\n * Filesystem utility routines\n *\n * Wireshark - Network traffic analyzer\n * By Gerald Combs <gerald@wireshark.org>\n * Copyright 1998 Gerald Combs\n *\n * SPDX-License-Identifier: GPL-2.0-or-later\n */\n\n#include <config.h>\n\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include <errno.h>\n\n#include <glib.h>...",
'meta': "{'repo_name': 'wireshark/wireshark', 'stars': '2789', 'repo_language': 'C', 'file_name': 'packet-mpeg-audio-template.c', 'mime_type': 'text/x-c'}"
}
```
</details>
### Data Fields
#### all
- `text` (str): Text.
- `meta` (dict): Metadata of the data instance with keys:
- pile_set_name: Name of the subset.
<details>
<summary>Expand to see individual components</summary>
#### enron_emails
- `text` (str): Text.
- `meta` (str): Metadata of the data instance.
#### europarl
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: language.
#### free_law
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: case_ID, case_jurisdiction, date_created.
#### hacker_news
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: id.
#### nih_exporter
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: APPLICATION_ID.
#### pubmed
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: pmid, language.
#### pubmed_central
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: ID of the data instance.
#### ubuntu_irc
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: channel, month.
#### uspto
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: bibliographic_information, source_file, abstract, classifications,
inventors.
#### github
- `text` (str): Text.
- `meta` (str): Metadata of the data instance with: repo_name, stars, repo_language, file_name, mime_type.
### Data Splits
The "all" configuration is composed of 3 splits: train, validation and test.
</details>
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
This dataset was primarily curated by Leo Gao and Stella Biderman, with assistance from other authors of the Pile paper.
### Licensing Information
Please refer to the specific license depending on the subset you use:
- PubMed Central: [MIT License](https://github.com/EleutherAI/pile-pubmedcentral/blob/master/LICENSE)
### Citation Information
```
@article{gao2020pile,
title={The {P}ile: An 800{GB} dataset of diverse text for language modeling},
author={Gao, Leo and Biderman, Stella and Black, Sid and Golding, Laurence and Hoppe, Travis and Foster, Charles and Phang, Jason and He, Horace and Thite, Anish and Nabeshima, Noa and others},
journal={arXiv preprint arXiv:2101.00027},
year={2020}
}
@article{biderman2022datasheet,
title={Datasheet for the pile},
author={Biderman, Stella and Bicheno, Kieran and Gao, Leo},
journal={arXiv preprint arXiv:2201.07311},
year={2022}
}
```
### Contributions
Thanks to [@github-username](https://github.com/<github-username>) for adding this dataset. |
LULab/myPOS | ---
license: cc-by-4.0
---
|
open-llm-leaderboard/details_FuseAI__FuseChat-7B-Slerp | ---
pretty_name: Evaluation run of FuseAI/FuseChat-7B-Slerp
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [FuseAI/FuseChat-7B-Slerp](https://huggingface.co/FuseAI/FuseChat-7B-Slerp) on\
\ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_FuseAI__FuseChat-7B-Slerp\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-29T19:25:05.731431](https://huggingface.co/datasets/open-llm-leaderboard/details_FuseAI__FuseChat-7B-Slerp/blob/main/results_2024-02-29T19-25-05.731431.json) (note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6405573713281516,\n\
\ \"acc_stderr\": 0.032222929251837,\n \"acc_norm\": 0.6422501101500138,\n\
\ \"acc_norm_stderr\": 0.032873048974703764,\n \"mc1\": 0.3072215422276622,\n\
\ \"mc1_stderr\": 0.016150201321323016,\n \"mc2\": 0.45618408792342546,\n\
\ \"mc2_stderr\": 0.015076169944832135\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5793515358361775,\n \"acc_stderr\": 0.014426211252508397,\n\
\ \"acc_norm\": 0.6262798634812287,\n \"acc_norm_stderr\": 0.014137708601759093\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6495717984465246,\n\
\ \"acc_stderr\": 0.004761289867046068,\n \"acc_norm\": 0.841665006970723,\n\
\ \"acc_norm_stderr\": 0.003643087529213717\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n\
\ \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n\
\ \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.0378272898086547,\n\
\ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.0378272898086547\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n\
\ \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \
\ \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n\
\ \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n\
\ \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \
\ \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\"\
: 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n\
\ \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n\
\ \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n\
\ \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n\
\ \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n\
\ \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5526315789473685,\n\
\ \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.5526315789473685,\n\
\ \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"\
acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5396825396825397,\n\
\ \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.5396825396825397,\n\
\ \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n\
\ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n\
\ \"acc_stderr\": 0.02328766512726854,\n \"acc_norm\": 0.7870967741935484,\n\
\ \"acc_norm_stderr\": 0.02328766512726854\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n\
\ \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\
: 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009182,\n\
\ \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009182\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7777777777777778,\n \"acc_stderr\": 0.02962022787479048,\n \"\
acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02962022787479048\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9222797927461139,\n \"acc_stderr\": 0.019321805557223137,\n\
\ \"acc_norm\": 0.9222797927461139,\n \"acc_norm_stderr\": 0.019321805557223137\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.02394672474156397,\n \
\ \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.02394672474156397\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131143,\n \
\ \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131143\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6554621848739496,\n \"acc_stderr\": 0.030868682604121622,\n\
\ \"acc_norm\": 0.6554621848739496,\n \"acc_norm_stderr\": 0.030868682604121622\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\
: 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\
\ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n\
\ \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n\
\ \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\
: {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n\
\ \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7892156862745098,\n \"acc_stderr\": 0.02862654791243742,\n \"\
acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.02862654791243742\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \
\ \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n\
\ \"acc_stderr\": 0.030500283176545847,\n \"acc_norm\": 0.7085201793721974,\n\
\ \"acc_norm_stderr\": 0.030500283176545847\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n\
\ \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516303,\n \"\
acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516303\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n\
\ \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n\
\ \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n\
\ \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\
\ \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n\
\ \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822583,\n\
\ \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822583\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n\
\ \"acc_stderr\": 0.021901905115073325,\n \"acc_norm\": 0.8717948717948718,\n\
\ \"acc_norm_stderr\": 0.021901905115073325\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8135376756066411,\n\
\ \"acc_stderr\": 0.013927751372001512,\n \"acc_norm\": 0.8135376756066411,\n\
\ \"acc_norm_stderr\": 0.013927751372001512\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n\
\ \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39888268156424583,\n\
\ \"acc_stderr\": 0.01637696614261008,\n \"acc_norm\": 0.39888268156424583,\n\
\ \"acc_norm_stderr\": 0.01637696614261008\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n\
\ \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n\
\ \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n\
\ \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.02492200116888633,\n\
\ \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.02492200116888633\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \
\ \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4804432855280313,\n\
\ \"acc_stderr\": 0.012760464028289299,\n \"acc_norm\": 0.4804432855280313,\n\
\ \"acc_norm_stderr\": 0.012760464028289299\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.7022058823529411,\n \"acc_stderr\": 0.027778298701545443,\n\
\ \"acc_norm\": 0.7022058823529411,\n \"acc_norm_stderr\": 0.027778298701545443\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6650326797385621,\n \"acc_stderr\": 0.019094228167000318,\n \
\ \"acc_norm\": 0.6650326797385621,\n \"acc_norm_stderr\": 0.019094228167000318\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274645,\n\
\ \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274645\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n\
\ \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n\
\ \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \
\ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n\
\ \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n\
\ \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070806,\n\
\ \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070806\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3072215422276622,\n\
\ \"mc1_stderr\": 0.016150201321323016,\n \"mc2\": 0.45618408792342546,\n\
\ \"mc2_stderr\": 0.015076169944832135\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7947908445146015,\n \"acc_stderr\": 0.01135031570746207\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6254738438210766,\n \
\ \"acc_stderr\": 0.013331774158491391\n }\n}\n```"
repo_url: https://huggingface.co/FuseAI/FuseChat-7B-Slerp
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|arc:challenge|25_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|gsm8k|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hellaswag|10_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-29T19-25-05.731431.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-29T19-25-05.731431.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- '**/details_harness|winogrande|5_2024-02-29T19-25-05.731431.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-29T19-25-05.731431.parquet'
- config_name: results
data_files:
- split: 2024_02_29T19_25_05.731431
path:
- results_2024-02-29T19-25-05.731431.parquet
- split: latest
path:
- results_2024-02-29T19-25-05.731431.parquet
---
# Dataset Card for Evaluation run of FuseAI/FuseChat-7B-Slerp
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [FuseAI/FuseChat-7B-Slerp](https://huggingface.co/FuseAI/FuseChat-7B-Slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_FuseAI__FuseChat-7B-Slerp",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-02-29T19:25:05.731431](https://huggingface.co/datasets/open-llm-leaderboard/details_FuseAI__FuseChat-7B-Slerp/blob/main/results_2024-02-29T19-25-05.731431.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6405573713281516,
"acc_stderr": 0.032222929251837,
"acc_norm": 0.6422501101500138,
"acc_norm_stderr": 0.032873048974703764,
"mc1": 0.3072215422276622,
"mc1_stderr": 0.016150201321323016,
"mc2": 0.45618408792342546,
"mc2_stderr": 0.015076169944832135
},
"harness|arc:challenge|25": {
"acc": 0.5793515358361775,
"acc_stderr": 0.014426211252508397,
"acc_norm": 0.6262798634812287,
"acc_norm_stderr": 0.014137708601759093
},
"harness|hellaswag|10": {
"acc": 0.6495717984465246,
"acc_stderr": 0.004761289867046068,
"acc_norm": 0.841665006970723,
"acc_norm_stderr": 0.003643087529213717
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5777777777777777,
"acc_stderr": 0.04266763404099582,
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.04266763404099582
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.0378272898086547,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.0378272898086547
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6830188679245283,
"acc_stderr": 0.02863723563980089,
"acc_norm": 0.6830188679245283,
"acc_norm_stderr": 0.02863723563980089
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.75,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.75,
"acc_norm_stderr": 0.03621034121889507
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6473988439306358,
"acc_stderr": 0.036430371689585475,
"acc_norm": 0.6473988439306358,
"acc_norm_stderr": 0.036430371689585475
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4019607843137255,
"acc_stderr": 0.04878608714466996,
"acc_norm": 0.4019607843137255,
"acc_norm_stderr": 0.04878608714466996
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.74,
"acc_stderr": 0.04408440022768079,
"acc_norm": 0.74,
"acc_norm_stderr": 0.04408440022768079
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5702127659574469,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.5702127659574469,
"acc_norm_stderr": 0.03236214467715564
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5526315789473685,
"acc_stderr": 0.04677473004491199,
"acc_norm": 0.5526315789473685,
"acc_norm_stderr": 0.04677473004491199
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.025487187147859375,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.025487187147859375
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5396825396825397,
"acc_stderr": 0.04458029125470973,
"acc_norm": 0.5396825396825397,
"acc_norm_stderr": 0.04458029125470973
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.0446196043338474,
"acc_norm": 0.27,
"acc_norm_stderr": 0.0446196043338474
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7870967741935484,
"acc_stderr": 0.02328766512726854,
"acc_norm": 0.7870967741935484,
"acc_norm_stderr": 0.02328766512726854
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.49261083743842365,
"acc_stderr": 0.03517603540361008,
"acc_norm": 0.49261083743842365,
"acc_norm_stderr": 0.03517603540361008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7636363636363637,
"acc_stderr": 0.03317505930009182,
"acc_norm": 0.7636363636363637,
"acc_norm_stderr": 0.03317505930009182
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.02962022787479048,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.02962022787479048
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9222797927461139,
"acc_stderr": 0.019321805557223137,
"acc_norm": 0.9222797927461139,
"acc_norm_stderr": 0.019321805557223137
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6641025641025641,
"acc_stderr": 0.02394672474156397,
"acc_norm": 0.6641025641025641,
"acc_norm_stderr": 0.02394672474156397
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.028897748741131143,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.028897748741131143
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6554621848739496,
"acc_stderr": 0.030868682604121622,
"acc_norm": 0.6554621848739496,
"acc_norm_stderr": 0.030868682604121622
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.03406315360711507,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.03406315360711507
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7892156862745098,
"acc_stderr": 0.02862654791243742,
"acc_norm": 0.7892156862745098,
"acc_norm_stderr": 0.02862654791243742
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8059071729957806,
"acc_stderr": 0.025744902532290916,
"acc_norm": 0.8059071729957806,
"acc_norm_stderr": 0.025744902532290916
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.7085201793721974,
"acc_stderr": 0.030500283176545847,
"acc_norm": 0.7085201793721974,
"acc_norm_stderr": 0.030500283176545847
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7480916030534351,
"acc_stderr": 0.03807387116306086,
"acc_norm": 0.7480916030534351,
"acc_norm_stderr": 0.03807387116306086
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7520661157024794,
"acc_stderr": 0.03941897526516303,
"acc_norm": 0.7520661157024794,
"acc_norm_stderr": 0.03941897526516303
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7685185185185185,
"acc_stderr": 0.04077494709252627,
"acc_norm": 0.7685185185185185,
"acc_norm_stderr": 0.04077494709252627
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.754601226993865,
"acc_stderr": 0.03380939813943354,
"acc_norm": 0.754601226993865,
"acc_norm_stderr": 0.03380939813943354
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|hendrycksTest-management|5": {
"acc": 0.8058252427184466,
"acc_stderr": 0.03916667762822583,
"acc_norm": 0.8058252427184466,
"acc_norm_stderr": 0.03916667762822583
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8717948717948718,
"acc_stderr": 0.021901905115073325,
"acc_norm": 0.8717948717948718,
"acc_norm_stderr": 0.021901905115073325
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8135376756066411,
"acc_stderr": 0.013927751372001512,
"acc_norm": 0.8135376756066411,
"acc_norm_stderr": 0.013927751372001512
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7225433526011561,
"acc_stderr": 0.024105712607754307,
"acc_norm": 0.7225433526011561,
"acc_norm_stderr": 0.024105712607754307
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.39888268156424583,
"acc_stderr": 0.01637696614261008,
"acc_norm": 0.39888268156424583,
"acc_norm_stderr": 0.01637696614261008
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7320261437908496,
"acc_stderr": 0.025360603796242557,
"acc_norm": 0.7320261437908496,
"acc_norm_stderr": 0.025360603796242557
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7170418006430869,
"acc_stderr": 0.02558306248998481,
"acc_norm": 0.7170418006430869,
"acc_norm_stderr": 0.02558306248998481
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.02492200116888633,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.02492200116888633
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4645390070921986,
"acc_stderr": 0.029752389657427047,
"acc_norm": 0.4645390070921986,
"acc_norm_stderr": 0.029752389657427047
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4804432855280313,
"acc_stderr": 0.012760464028289299,
"acc_norm": 0.4804432855280313,
"acc_norm_stderr": 0.012760464028289299
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.7022058823529411,
"acc_stderr": 0.027778298701545443,
"acc_norm": 0.7022058823529411,
"acc_norm_stderr": 0.027778298701545443
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6650326797385621,
"acc_stderr": 0.019094228167000318,
"acc_norm": 0.6650326797385621,
"acc_norm_stderr": 0.019094228167000318
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274645,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274645
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8258706467661692,
"acc_stderr": 0.026814951200421603,
"acc_norm": 0.8258706467661692,
"acc_norm_stderr": 0.026814951200421603
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.88,
"acc_stderr": 0.03265986323710906,
"acc_norm": 0.88,
"acc_norm_stderr": 0.03265986323710906
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8538011695906432,
"acc_stderr": 0.027097290118070806,
"acc_norm": 0.8538011695906432,
"acc_norm_stderr": 0.027097290118070806
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3072215422276622,
"mc1_stderr": 0.016150201321323016,
"mc2": 0.45618408792342546,
"mc2_stderr": 0.015076169944832135
},
"harness|winogrande|5": {
"acc": 0.7947908445146015,
"acc_stderr": 0.01135031570746207
},
"harness|gsm8k|5": {
"acc": 0.6254738438210766,
"acc_stderr": 0.013331774158491391
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
timpal0l/OpenHermes-2.5-sv | ---
language:
- sv
dataset_info:
features:
- name: category
dtype: string
- name: model_name
dtype: 'null'
- name: idx
dtype: 'null'
- name: custom_instruction
dtype: 'null'
- name: topic
dtype: 'null'
- name: system_prompt
dtype: 'null'
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
- name: weight
dtype: 'null'
- name: source
dtype: string
- name: hash
dtype: 'null'
- name: avatarUrl
dtype: 'null'
- name: model
dtype: 'null'
- name: language
dtype: 'null'
- name: id
dtype: string
- name: views
dtype: 'null'
- name: title
dtype: 'null'
- name: skip_prompt_formatting
dtype: bool
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 76599171
num_examples: 84765
download_size: 46192507
dataset_size: 76599171
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
license: mit
task_categories:
- text-generation
tags:
- instruct
datasets:
- timpal0l/OpenHermes-2.5-sv
- teknium/OpenHermes-2.5
---
# OpenHermes-2.5-sv
This is a machine-translated instruct dataset from [OpenHermes-2.5](https://huggingface.co/datasets/teknium/OpenHermes-2.5).
The [facebook/seamless-m4t-v2-large](https://huggingface.co/facebook/seamless-m4t-v2-large) model was used, and some post-filtering was applied to remove repetitive texts that occurred due to translation errors.
# Example data:
```json
[
{
"from":"human",
"value":"Vilket naturfenomen, som orsakas av att ljus reflekteras och bryts genom vattendroppar, resulterar i en färgglad båge på himlen?",
"weight":null
},
{
"from":"gpt",
"value":"En regnbåge är ett naturligt fenomen som uppstår när ljus reflekteras och bryts genom vattendroppar i jordens atmosfär. Denna spridning av ljus resulterar i en färgglad båge på himlen, vanligtvis med färgerna rött, orange, gult, grönt, blått, indigo och violett. Regnbågar dyker oftast upp efter regnfall och under soliga intervaller.",
"weight":null
}
]
``` |
davanstrien/illustrated_ads_images_labels_only | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': text-only
'1': illustrations
splits:
- name: train
num_bytes: 47581375
num_examples: 549
download_size: 47599430
dataset_size: 47581375
size_categories:
- n<1K
---
# Dataset Card for "illustrated_ads_images_labels_only"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yejeekang/legal_sum_instruction | ---
license: afl-3.0
---
|
CyberHarem/sakurai_yumeko_theidolmster | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of sakurai_yumeko (THE iDOLM@STER)
This is the dataset of sakurai_yumeko (THE iDOLM@STER), containing 69 images and their tags.
The core tags of this character are `long_hair, green_eyes, brown_hair, side_ponytail, breasts`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:----------|:-----------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 69 | 35.23 MiB | [Download](https://huggingface.co/datasets/CyberHarem/sakurai_yumeko_theidolmster/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 69 | 30.48 MiB | [Download](https://huggingface.co/datasets/CyberHarem/sakurai_yumeko_theidolmster/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 114 | 50.37 MiB | [Download](https://huggingface.co/datasets/CyberHarem/sakurai_yumeko_theidolmster/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 69 | 34.78 MiB | [Download](https://huggingface.co/datasets/CyberHarem/sakurai_yumeko_theidolmster/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 114 | 55.82 MiB | [Download](https://huggingface.co/datasets/CyberHarem/sakurai_yumeko_theidolmster/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/sakurai_yumeko_theidolmster',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 24 |  |  |  |  |  | 1girl, detached_sleeves, bare_shoulders, blush, solo, midriff, star_(symbol), necklace, navel, smile, skirt, striped, clothes_around_waist, open_mouth |
| 1 | 8 |  |  |  |  |  | boots, midriff, thighhighs, navel, skirt, 3girls, clothes_around_waist, crop_top, smile, 1girl, 2girls, detached_sleeves, open_mouth, star_(symbol), striped |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | detached_sleeves | bare_shoulders | blush | solo | midriff | star_(symbol) | necklace | navel | smile | skirt | striped | clothes_around_waist | open_mouth | boots | thighhighs | 3girls | crop_top | 2girls |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------------------|:-----------------|:--------|:-------|:----------|:----------------|:-----------|:--------|:--------|:--------|:----------|:-----------------------|:-------------|:--------|:-------------|:---------|:-----------|:---------|
| 0 | 24 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | |
| 1 | 8 |  |  |  |  |  | X | X | | | | X | X | | X | X | X | X | X | X | X | X | X | X | X |
|
wsyar/llmbG11 | ---
license: apache-2.0
---
|
atharvapawar/part5_dataSorted_Diversevul_llama2_dataset | ---
license: mit
---
|
babs/vox-populi-subset | ---
dataset_info:
features:
- name: source_id
dtype: string
- name: target_id
dtype: string
- name: source_audio
dtype: audio
- name: target_audio
dtype: audio
- name: target_units
sequence: int32
splits:
- name: train
num_bytes: 459597811.0
num_examples: 1000
download_size: 457570458
dataset_size: 459597811.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
This repo contains about 1000 rows of random speech-to-speech VoxPopuli data. It can be used for quick testing of code and pipelines. |
KatoHF/ultrafeedback_binarized | ---
dataset_info:
features:
- name: text
dtype: string
- name: label
dtype: int64
splits:
- name: train
num_bytes: 493667577
num_examples: 225328
download_size: 184177111
dataset_size: 493667577
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Locutusque/hercules-v2.5 | ---
language:
- en
license: apache-2.0
size_categories:
- 1M<n<10M
task_categories:
- text-generation
- question-answering
- conversational
dataset_info:
features:
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
- name: source
dtype: string
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 3257199688.0
num_examples: 1810725
download_size: 1488468818
dataset_size: 3257199688.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
### Dataset Card for Hercules-v2.5

#### Overview
**Dataset Name:** Hercules-v2.5
**Version:** 2.5
**Date of Release:** February 9, 2024
**Size:** 1,810,725
**Data Sources:**
Hercules-v2.5 is an enriched instruction dataset derived from Hercules-v2.0, aimed at fixing a critical oversight that was not caught and improving reasoning, math, and truth capabilities. The oversight was that the functions were not provided in the function calling examples of the previous dataset, leading to severe hallucinations. The dataset amalgamates contributions from various data sources, with a strong emphasis on Biology, Physics, Medicine, Math, Computer Science, Instruction Following, Function Calling, and Roleplay. The data sources used to construct Hercules-v2.0 include:
- cognitivecomputations/dolphin (first 300k examples)
- Evol Instruct 70K && 140K
- teknium/GPT4-LLM-Cleaned
- jondurbin/airoboros-3.2
- AlekseyKorshuk/camel-chatml
- CollectiveCognition/chats-data-2023-09-22
- Nebulous/lmsys-chat-1m-smortmodelsonly
- glaiveai/glaive-code-assistant-v2
- glaiveai/glaive-code-assistant
- glaiveai/glaive-function-calling-v2
- garage-bAInd/Open-Platypus
- meta-math/MetaMathQA
- teknium/GPTeacher-General-Instruct
- GPTeacher roleplay datasets
- BI55/MedText
- pubmed_qa labeled subset
- Unnatural Instructions
- M4-ai/LDJnr_combined_inout_format
- CollectiveCognition/chats-data-2023-09-27
- CollectiveCognition/chats-data-2023-10-16
This dataset is written with mostly GPT-4, but other models such as Claude-1, Claude-1-instant, Claude-2, Claude-2.1, and GPT-3.5-Turbo can be found in the data.
Curation of this dataset was based on findings from hercules-v2.0.
Warning: This dataset contains toxic examples. Use at your own risk.
#### Description
Hercules-v2.5 is designed to serve as a comprehensive and multifaceted dataset tailored for the development and evaluation of advanced machine learning models, particularly those focused on natural language understanding and processing in specialized domains. It includes a variety of formats, such as question-answering pairs, dialogues, function calls, and roleplay scenarios, providing robust training material for models to handle complex instructions and execute function calls.
#### Data Format
The dataset includes JSON-formatted entries, with a unique structure to incorporate function calling examples. Each entry is composed of a sequence of interactions, each tagged with "from" to indicate the speaker (human, function-call, function-response, or gpt) and "value" to present the content or payload of the interaction. For example:
```json
[
{ "from": "human", "value": "Hi, I need to convert a temperature from Celsius to Fahrenheit. The temperature is 30 degrees Celsius." },
{ "from": "function-call", "value": "{\"name\": \"convert_temperature\", \"arguments\": '{\"temperature\": 30, \"from_unit\": \"Celsius\", \"to_unit\": \"Fahrenheit\"}'}" },
{ "from": "function-response", "value": "{\"converted_temperature\": 86}" },
{ "from": "gpt", "value": "The converted temperature from 30 degrees Celsius to Fahrenheit is 86 degrees Fahrenheit." }
]
```
#### Usage
The Hercules-v2.5 dataset is designed for training and evaluating AI systems in their ability to follow instructions, execute function calls, and interact in roleplay scenarios across various scientific and technical disciplines. Researchers and developers can leverage this dataset for:
- Enhancing language models' understanding of complex topics.
- Improving the accuracy of function-call executions within conversational agents.
- Developing models capable of engaging in educational and informative dialogue.
- Benchmarking systems on their ability to follow intricate instructions and provide accurate responses.
#### Licensing
This dataset is released under the apache-2.0 license.
#### Citation
Researchers using Hercules-v2.5 in their work should cite the dataset as follows:
```
@misc{sebastian_gabarain_2024,
title = {Hercules-v2.0: An Instruction Dataset for Specialized Domains},
author = {Sebastian Gabarain},
publisher = {HuggingFace},
year = {2024},
  doi = {10.57967/hf/1744},
  url = {https://huggingface.co/datasets/Locutusque/hercules-v2.0}
}
```
#### Acknowledgements
Hercules-v2.5 was made possible thanks to the contributions from various datasets and the community's efforts in compiling and refining data to create a rich and diverse instruction set. Special thanks go to the creator of OpenHermes-2.5 and all the data sources listed above.
#### Version History
v2.5: Current version with fixed function-calling oversight.
v2.0: Enhanced diversity and scope.
v1.0: Initial release. |
doushabao4766/resume_zh_ner | ---
dataset_info:
features:
- name: id
dtype: int64
- name: tokens
sequence: string
- name: ner_tags
sequence: int64
splits:
- name: train
num_bytes: 1893971
num_examples: 3821
- name: test
num_bytes: 231104
num_examples: 477
- name: validation
num_bytes: 212262
num_examples: 463
download_size: 0
dataset_size: 2337337
---
# Dataset Card for "resume_zh_ner"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
distilled-from-one-sec-cv12/chunk_186 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 646724376
num_examples: 126018
download_size: 653514673
dataset_size: 646724376
---
# Dataset Card for "chunk_186"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
drmworld/uird_t5-small_v.0.1 | ---
dataset_info:
features:
- name: character
struct:
- name: age
dtype: int64
- name: bio
dtype: string
- name: code
dtype: string
- name: country_flag
dtype: string
- name: created_at
dtype: string
- name: deleted_at
dtype: 'null'
- name: description
dtype: string
- name: display_name
dtype: string
- name: eleven_voice_id
dtype: string
- name: eleven_voice_settings
struct:
- name: similarity_boost
dtype: float64
- name: stability
dtype: float64
- name: style
dtype: int64
- name: use_speaker_boost
dtype: bool
- name: height
dtype: string
- name: hobbies
sequence: string
- name: id
dtype: string
- name: image_cost
dtype: int64
- name: is_ready_chat_content
dtype: bool
- name: is_ready_llm
dtype: bool
- name: is_ready_profile_content
dtype: bool
- name: is_ready_profile_data
dtype: bool
- name: level1_name
dtype: string
- name: level1_value
dtype: int64
- name: level2_name
dtype: string
- name: level2_value
dtype: int64
- name: level3_name
dtype: string
- name: level3_value
dtype: int64
- name: llm_mw_url
dtype: 'null'
- name: location
dtype: string
- name: metadata
struct:
- name: age
dtype: string
- name: birthday
dtype: string
- name: body type
dtype: string
- name: description
dtype: string
- name: education
dtype: string
- name: example 2 / notes
dtype: string
- name: eye color
dtype: string
- name: family
dtype: string
- name: favorite destinations
dtype: string
- name: favorite food
dtype: string
- name: favorite movies / tv
dtype: string
- name: favorite music / artist
dtype: string
- name: favorite sports / team
dtype: string
- name: first_name
dtype: string
- name: gender
dtype: string
- name: hair color
dtype: string
- name: height
dtype: string
- name: hobbies
dtype: string
- name: ig / twitter bio
dtype: string
- name: location
dtype: string
- name: name
dtype: string
- name: nationality
dtype: string
- name: occupation
dtype: string
- name: personality
dtype: string
- name: pets
dtype: string
- name: pronoun
dtype: string
- name: pronoun_heshe
dtype: string
- name: turn ons
dtype: string
- name: website bio
dtype: string
- name: website headline
dtype: string
- name: zodiac sign
dtype: string
- name: nature
dtype: string
- name: ordering
dtype: int64
- name: preferences
sequence: string
- name: updated_at
dtype: string
- name: welcome_message
dtype: string
- name: welcome_message_reply_suggestion
dtype: string
- name: user
struct:
- name: gender
dtype: string
- name: hobbies
sequence: string
- name: turn_ons
sequence: string
- name: history
list:
- name: id
dtype: int64
- name: llm_id
dtype: int64
- name: normal_request
sequence: string
- name: normal_tag
dtype: string
- name: picture_camera
dtype: 'null'
- name: picture_category
dtype: string
- name: picture_clothes
dtype: string
- name: picture_location
dtype: string
- name: picture_request
sequence: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 26654
num_examples: 1
download_size: 68244
dataset_size: 26654
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
mtkinit/mtkinit_TCB_sentiment_dataset | ---
pretty_name: mtkinit/TCB-sentiment-dataset
---
# mtkinit/TCB-sentiment-dataset
Created from AIOD platform |
DK9/UV-ControlNet | ---
license: apache-2.0
---
|
khalidalt/SANAD | ---
license: cc-by-4.0
---
# Dataset Card for SANAD
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://data.mendeley.com/datasets/57zpx667y9/2
### Dataset Summary
SANAD Dataset is a large collection of Arabic news articles that can be used in different Arabic NLP tasks such as Text Classification and Word Embedding. The articles were collected using Python scripts written specifically for three popular news websites: AlKhaleej, AlArabiya and Akhbarona. All datasets have seven categories [Culture, Finance, Medical, Politics, Religion, Sports and Tech], except AlArabiya which doesn’t have [Religion]. SANAD contains a total number of 190k+ articles.
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
license: cc-by-4.0
### Citation Information
```
@article{einea2019sanad,
title={Sanad: Single-label arabic news articles dataset for automatic text categorization},
author={Einea, Omar and Elnagar, Ashraf and Al Debsi, Ridhwan},
journal={Data in brief},
volume={25},
pages={104076},
year={2019},
publisher={Elsevier}
}
```
### Contributions
|
Omerturktt/omerturkai | ---
license: openrail
---
|
open-llm-leaderboard/details_teilomillet__MiniMerlin-3b-v0.1 | ---
pretty_name: Evaluation run of teilomillet/MiniMerlin-3b-v0.1
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [teilomillet/MiniMerlin-3b-v0.1](https://huggingface.co/teilomillet/MiniMerlin-3b-v0.1)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_teilomillet__MiniMerlin-3b-v0.1\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-12-13T12:30:09.463717](https://huggingface.co/datasets/open-llm-leaderboard/details_teilomillet__MiniMerlin-3b-v0.1/blob/main/results_2023-12-13T12-30-09.463717.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.42829802423091123,\n\
\ \"acc_stderr\": 0.034419009383078604,\n \"acc_norm\": 0.4345596062931712,\n\
\ \"acc_norm_stderr\": 0.035301959046270974,\n \"mc1\": 0.3023255813953488,\n\
\ \"mc1_stderr\": 0.016077509266133022,\n \"mc2\": 0.49647374974901654,\n\
\ \"mc2_stderr\": 0.015915065186614973\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.38139931740614336,\n \"acc_stderr\": 0.014194389086685261,\n\
\ \"acc_norm\": 0.4069965870307167,\n \"acc_norm_stderr\": 0.014356399418009131\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4343756223859789,\n\
\ \"acc_stderr\": 0.004946617138983514,\n \"acc_norm\": 0.5406293567018522,\n\
\ \"acc_norm_stderr\": 0.004973280417705513\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n\
\ \"acc_stderr\": 0.042849586397533994,\n \"acc_norm\": 0.43703703703703706,\n\
\ \"acc_norm_stderr\": 0.042849586397533994\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.040463368839782486,\n\
\ \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.040463368839782486\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.38,\n\
\ \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \
\ \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.47924528301886793,\n \"acc_stderr\": 0.030746349975723463,\n\
\ \"acc_norm\": 0.47924528301886793,\n \"acc_norm_stderr\": 0.030746349975723463\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4722222222222222,\n\
\ \"acc_stderr\": 0.04174752578923185,\n \"acc_norm\": 0.4722222222222222,\n\
\ \"acc_norm_stderr\": 0.04174752578923185\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\"\
: 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542126,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542126\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3872832369942196,\n\
\ \"acc_stderr\": 0.037143259063020656,\n \"acc_norm\": 0.3872832369942196,\n\
\ \"acc_norm_stderr\": 0.037143259063020656\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.04336432707993177,\n\
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.04336432707993177\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n\
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.3276595744680851,\n \"acc_stderr\": 0.030683020843231,\n\
\ \"acc_norm\": 0.3276595744680851,\n \"acc_norm_stderr\": 0.030683020843231\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\
\ \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n\
\ \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.04130740879555497,\n\
\ \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.04130740879555497\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2566137566137566,\n \"acc_stderr\": 0.022494510767503154,\n \"\
acc_norm\": 0.2566137566137566,\n \"acc_norm_stderr\": 0.022494510767503154\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n\
\ \"acc_stderr\": 0.03893259610604673,\n \"acc_norm\": 0.25396825396825395,\n\
\ \"acc_norm_stderr\": 0.03893259610604673\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5161290322580645,\n\
\ \"acc_stderr\": 0.028429203176724555,\n \"acc_norm\": 0.5161290322580645,\n\
\ \"acc_norm_stderr\": 0.028429203176724555\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.35467980295566504,\n \"acc_stderr\": 0.0336612448905145,\n\
\ \"acc_norm\": 0.35467980295566504,\n \"acc_norm_stderr\": 0.0336612448905145\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\"\
: 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.5212121212121212,\n \"acc_stderr\": 0.03900828913737302,\n\
\ \"acc_norm\": 0.5212121212121212,\n \"acc_norm_stderr\": 0.03900828913737302\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.5252525252525253,\n \"acc_stderr\": 0.03557806245087314,\n \"\
acc_norm\": 0.5252525252525253,\n \"acc_norm_stderr\": 0.03557806245087314\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.5647668393782384,\n \"acc_stderr\": 0.035780381650085846,\n\
\ \"acc_norm\": 0.5647668393782384,\n \"acc_norm_stderr\": 0.035780381650085846\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.382051282051282,\n \"acc_stderr\": 0.024635549163908227,\n \
\ \"acc_norm\": 0.382051282051282,\n \"acc_norm_stderr\": 0.024635549163908227\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.22962962962962963,\n \"acc_stderr\": 0.025644108639267613,\n \
\ \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.025644108639267613\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.031041941304059274,\n\
\ \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.031041941304059274\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"\
acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.5614678899082569,\n \"acc_stderr\": 0.021274713073954572,\n \"\
acc_norm\": 0.5614678899082569,\n \"acc_norm_stderr\": 0.021274713073954572\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.25,\n \"acc_stderr\": 0.029531221160930918,\n \"acc_norm\": 0.25,\n\
\ \"acc_norm_stderr\": 0.029531221160930918\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\
: {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.03495624522015475,\n\
\ \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.03495624522015475\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.5907172995780591,\n \"acc_stderr\": 0.032007041833595914,\n \
\ \"acc_norm\": 0.5907172995780591,\n \"acc_norm_stderr\": 0.032007041833595914\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4663677130044843,\n\
\ \"acc_stderr\": 0.033481800170603065,\n \"acc_norm\": 0.4663677130044843,\n\
\ \"acc_norm_stderr\": 0.033481800170603065\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.5267175572519084,\n \"acc_stderr\": 0.04379024936553894,\n\
\ \"acc_norm\": 0.5267175572519084,\n \"acc_norm_stderr\": 0.04379024936553894\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.5867768595041323,\n \"acc_stderr\": 0.04495087843548408,\n \"\
acc_norm\": 0.5867768595041323,\n \"acc_norm_stderr\": 0.04495087843548408\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.04833682445228318,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.04833682445228318\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.5030674846625767,\n \"acc_stderr\": 0.03928297078179663,\n\
\ \"acc_norm\": 0.5030674846625767,\n \"acc_norm_stderr\": 0.03928297078179663\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n\
\ \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n\
\ \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.5825242718446602,\n \"acc_stderr\": 0.048828405482122375,\n\
\ \"acc_norm\": 0.5825242718446602,\n \"acc_norm_stderr\": 0.048828405482122375\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6709401709401709,\n\
\ \"acc_stderr\": 0.03078232157768817,\n \"acc_norm\": 0.6709401709401709,\n\
\ \"acc_norm_stderr\": 0.03078232157768817\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.49936143039591313,\n\
\ \"acc_stderr\": 0.01787994891443168,\n \"acc_norm\": 0.49936143039591313,\n\
\ \"acc_norm_stderr\": 0.01787994891443168\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.4653179190751445,\n \"acc_stderr\": 0.026854257928258893,\n\
\ \"acc_norm\": 0.4653179190751445,\n \"acc_norm_stderr\": 0.026854257928258893\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n\
\ \"acc_stderr\": 0.014422292204808862,\n \"acc_norm\": 0.24692737430167597,\n\
\ \"acc_norm_stderr\": 0.014422292204808862\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.5163398692810458,\n \"acc_stderr\": 0.028614624752805434,\n\
\ \"acc_norm\": 0.5163398692810458,\n \"acc_norm_stderr\": 0.028614624752805434\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4855305466237942,\n\
\ \"acc_stderr\": 0.02838619808417768,\n \"acc_norm\": 0.4855305466237942,\n\
\ \"acc_norm_stderr\": 0.02838619808417768\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.45987654320987653,\n \"acc_stderr\": 0.027731022753539274,\n\
\ \"acc_norm\": 0.45987654320987653,\n \"acc_norm_stderr\": 0.027731022753539274\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.3475177304964539,\n \"acc_stderr\": 0.028406627809590947,\n \
\ \"acc_norm\": 0.3475177304964539,\n \"acc_norm_stderr\": 0.028406627809590947\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3533246414602347,\n\
\ \"acc_stderr\": 0.012208408211082428,\n \"acc_norm\": 0.3533246414602347,\n\
\ \"acc_norm_stderr\": 0.012208408211082428\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.2757352941176471,\n \"acc_stderr\": 0.02714627193662517,\n\
\ \"acc_norm\": 0.2757352941176471,\n \"acc_norm_stderr\": 0.02714627193662517\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.4133986928104575,\n \"acc_stderr\": 0.01992211568278667,\n \
\ \"acc_norm\": 0.4133986928104575,\n \"acc_norm_stderr\": 0.01992211568278667\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5181818181818182,\n\
\ \"acc_stderr\": 0.04785964010794916,\n \"acc_norm\": 0.5181818181818182,\n\
\ \"acc_norm_stderr\": 0.04785964010794916\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.5346938775510204,\n \"acc_stderr\": 0.03193207024425314,\n\
\ \"acc_norm\": 0.5346938775510204,\n \"acc_norm_stderr\": 0.03193207024425314\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5771144278606966,\n\
\ \"acc_stderr\": 0.034932317774212816,\n \"acc_norm\": 0.5771144278606966,\n\
\ \"acc_norm_stderr\": 0.034932317774212816\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \
\ \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n\
\ \"acc_stderr\": 0.03858158940685516,\n \"acc_norm\": 0.43373493975903615,\n\
\ \"acc_norm_stderr\": 0.03858158940685516\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.5029239766081871,\n \"acc_stderr\": 0.03834759370936839,\n\
\ \"acc_norm\": 0.5029239766081871,\n \"acc_norm_stderr\": 0.03834759370936839\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3023255813953488,\n\
\ \"mc1_stderr\": 0.016077509266133022,\n \"mc2\": 0.49647374974901654,\n\
\ \"mc2_stderr\": 0.015915065186614973\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6053670086819258,\n \"acc_stderr\": 0.013736915172371888\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.013646702047005308,\n \
\ \"acc_stderr\": 0.003195747075480817\n }\n}\n```"
repo_url: https://huggingface.co/teilomillet/MiniMerlin-3b-v0.1
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|arc:challenge|25_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|gsm8k|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hellaswag|10_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-13T12-30-09.463717.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-13T12-30-09.463717.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- '**/details_harness|winogrande|5_2023-12-13T12-30-09.463717.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-12-13T12-30-09.463717.parquet'
- config_name: results
data_files:
- split: 2023_12_13T12_30_09.463717
path:
- results_2023-12-13T12-30-09.463717.parquet
- split: latest
path:
- results_2023-12-13T12-30-09.463717.parquet
---
# Dataset Card for Evaluation run of teilomillet/MiniMerlin-3b-v0.1
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [teilomillet/MiniMerlin-3b-v0.1](https://huggingface.co/teilomillet/MiniMerlin-3b-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_teilomillet__MiniMerlin-3b-v0.1",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-12-13T12:30:09.463717](https://huggingface.co/datasets/open-llm-leaderboard/details_teilomillet__MiniMerlin-3b-v0.1/blob/main/results_2023-12-13T12-30-09.463717.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.42829802423091123,
"acc_stderr": 0.034419009383078604,
"acc_norm": 0.4345596062931712,
"acc_norm_stderr": 0.035301959046270974,
"mc1": 0.3023255813953488,
"mc1_stderr": 0.016077509266133022,
"mc2": 0.49647374974901654,
"mc2_stderr": 0.015915065186614973
},
"harness|arc:challenge|25": {
"acc": 0.38139931740614336,
"acc_stderr": 0.014194389086685261,
"acc_norm": 0.4069965870307167,
"acc_norm_stderr": 0.014356399418009131
},
"harness|hellaswag|10": {
"acc": 0.4343756223859789,
"acc_stderr": 0.004946617138983514,
"acc_norm": 0.5406293567018522,
"acc_norm_stderr": 0.004973280417705513
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.43703703703703706,
"acc_stderr": 0.042849586397533994,
"acc_norm": 0.43703703703703706,
"acc_norm_stderr": 0.042849586397533994
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.4473684210526316,
"acc_stderr": 0.040463368839782486,
"acc_norm": 0.4473684210526316,
"acc_norm_stderr": 0.040463368839782486
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.38,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.38,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.47924528301886793,
"acc_stderr": 0.030746349975723463,
"acc_norm": 0.47924528301886793,
"acc_norm_stderr": 0.030746349975723463
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.04174752578923185,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04174752578923185
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.45,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542126,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542126
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.3872832369942196,
"acc_stderr": 0.037143259063020656,
"acc_norm": 0.3872832369942196,
"acc_norm_stderr": 0.037143259063020656
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.04336432707993177,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.04336432707993177
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.3276595744680851,
"acc_stderr": 0.030683020843231,
"acc_norm": 0.3276595744680851,
"acc_norm_stderr": 0.030683020843231
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2719298245614035,
"acc_stderr": 0.04185774424022056,
"acc_norm": 0.2719298245614035,
"acc_norm_stderr": 0.04185774424022056
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.04130740879555497,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.04130740879555497
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2566137566137566,
"acc_stderr": 0.022494510767503154,
"acc_norm": 0.2566137566137566,
"acc_norm_stderr": 0.022494510767503154
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.25396825396825395,
"acc_stderr": 0.03893259610604673,
"acc_norm": 0.25396825396825395,
"acc_norm_stderr": 0.03893259610604673
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.5161290322580645,
"acc_stderr": 0.028429203176724555,
"acc_norm": 0.5161290322580645,
"acc_norm_stderr": 0.028429203176724555
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.35467980295566504,
"acc_stderr": 0.0336612448905145,
"acc_norm": 0.35467980295566504,
"acc_norm_stderr": 0.0336612448905145
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.4,
"acc_stderr": 0.049236596391733084,
"acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.5212121212121212,
"acc_stderr": 0.03900828913737302,
"acc_norm": 0.5212121212121212,
"acc_norm_stderr": 0.03900828913737302
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.5252525252525253,
"acc_stderr": 0.03557806245087314,
"acc_norm": 0.5252525252525253,
"acc_norm_stderr": 0.03557806245087314
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.5647668393782384,
"acc_stderr": 0.035780381650085846,
"acc_norm": 0.5647668393782384,
"acc_norm_stderr": 0.035780381650085846
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.382051282051282,
"acc_stderr": 0.024635549163908227,
"acc_norm": 0.382051282051282,
"acc_norm_stderr": 0.024635549163908227
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.025644108639267613,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.025644108639267613
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.35294117647058826,
"acc_stderr": 0.031041941304059274,
"acc_norm": 0.35294117647058826,
"acc_norm_stderr": 0.031041941304059274
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33112582781456956,
"acc_stderr": 0.038425817186598696,
"acc_norm": 0.33112582781456956,
"acc_norm_stderr": 0.038425817186598696
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.5614678899082569,
"acc_stderr": 0.021274713073954572,
"acc_norm": 0.5614678899082569,
"acc_norm_stderr": 0.021274713073954572
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.25,
"acc_stderr": 0.029531221160930918,
"acc_norm": 0.25,
"acc_norm_stderr": 0.029531221160930918
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.5441176470588235,
"acc_stderr": 0.03495624522015475,
"acc_norm": 0.5441176470588235,
"acc_norm_stderr": 0.03495624522015475
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.5907172995780591,
"acc_stderr": 0.032007041833595914,
"acc_norm": 0.5907172995780591,
"acc_norm_stderr": 0.032007041833595914
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.4663677130044843,
"acc_stderr": 0.033481800170603065,
"acc_norm": 0.4663677130044843,
"acc_norm_stderr": 0.033481800170603065
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5267175572519084,
"acc_stderr": 0.04379024936553894,
"acc_norm": 0.5267175572519084,
"acc_norm_stderr": 0.04379024936553894
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.5867768595041323,
"acc_stderr": 0.04495087843548408,
"acc_norm": 0.5867768595041323,
"acc_norm_stderr": 0.04495087843548408
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.5,
"acc_stderr": 0.04833682445228318,
"acc_norm": 0.5,
"acc_norm_stderr": 0.04833682445228318
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5030674846625767,
"acc_stderr": 0.03928297078179663,
"acc_norm": 0.5030674846625767,
"acc_norm_stderr": 0.03928297078179663
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.3482142857142857,
"acc_stderr": 0.04521829902833586,
"acc_norm": 0.3482142857142857,
"acc_norm_stderr": 0.04521829902833586
},
"harness|hendrycksTest-management|5": {
"acc": 0.5825242718446602,
"acc_stderr": 0.048828405482122375,
"acc_norm": 0.5825242718446602,
"acc_norm_stderr": 0.048828405482122375
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.6709401709401709,
"acc_stderr": 0.03078232157768817,
"acc_norm": 0.6709401709401709,
"acc_norm_stderr": 0.03078232157768817
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.49936143039591313,
"acc_stderr": 0.01787994891443168,
"acc_norm": 0.49936143039591313,
"acc_norm_stderr": 0.01787994891443168
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.4653179190751445,
"acc_stderr": 0.026854257928258893,
"acc_norm": 0.4653179190751445,
"acc_norm_stderr": 0.026854257928258893
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24692737430167597,
"acc_stderr": 0.014422292204808862,
"acc_norm": 0.24692737430167597,
"acc_norm_stderr": 0.014422292204808862
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5163398692810458,
"acc_stderr": 0.028614624752805434,
"acc_norm": 0.5163398692810458,
"acc_norm_stderr": 0.028614624752805434
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.4855305466237942,
"acc_stderr": 0.02838619808417768,
"acc_norm": 0.4855305466237942,
"acc_norm_stderr": 0.02838619808417768
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.45987654320987653,
"acc_stderr": 0.027731022753539274,
"acc_norm": 0.45987654320987653,
"acc_norm_stderr": 0.027731022753539274
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.3475177304964539,
"acc_stderr": 0.028406627809590947,
"acc_norm": 0.3475177304964539,
"acc_norm_stderr": 0.028406627809590947
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.3533246414602347,
"acc_stderr": 0.012208408211082428,
"acc_norm": 0.3533246414602347,
"acc_norm_stderr": 0.012208408211082428
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.2757352941176471,
"acc_stderr": 0.02714627193662517,
"acc_norm": 0.2757352941176471,
"acc_norm_stderr": 0.02714627193662517
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.4133986928104575,
"acc_stderr": 0.01992211568278667,
"acc_norm": 0.4133986928104575,
"acc_norm_stderr": 0.01992211568278667
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.5181818181818182,
"acc_stderr": 0.04785964010794916,
"acc_norm": 0.5181818181818182,
"acc_norm_stderr": 0.04785964010794916
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.5346938775510204,
"acc_stderr": 0.03193207024425314,
"acc_norm": 0.5346938775510204,
"acc_norm_stderr": 0.03193207024425314
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.5771144278606966,
"acc_stderr": 0.034932317774212816,
"acc_norm": 0.5771144278606966,
"acc_norm_stderr": 0.034932317774212816
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-virology|5": {
"acc": 0.43373493975903615,
"acc_stderr": 0.03858158940685516,
"acc_norm": 0.43373493975903615,
"acc_norm_stderr": 0.03858158940685516
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.5029239766081871,
"acc_stderr": 0.03834759370936839,
"acc_norm": 0.5029239766081871,
"acc_norm_stderr": 0.03834759370936839
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3023255813953488,
"mc1_stderr": 0.016077509266133022,
"mc2": 0.49647374974901654,
"mc2_stderr": 0.015915065186614973
},
"harness|winogrande|5": {
"acc": 0.6053670086819258,
"acc_stderr": 0.013736915172371888
},
"harness|gsm8k|5": {
"acc": 0.013646702047005308,
"acc_stderr": 0.003195747075480817
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
elliotthwangmsa/Evol-Instruct-Chinese-GPT4-3k | ---
dataset_info:
features:
- name: output
dtype: string
- name: instruction
dtype: string
splits:
- name: train
num_bytes: 4936507.757142857
num_examples: 3000
download_size: 3074697
dataset_size: 4936507.757142857
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
jxu124/llava_instruct_150k | ---
dataset_info:
features:
- name: global_image_id
dtype: string
- name: image_path
dtype: string
- name: dialog
sequence:
sequence: string
- name: anns_id
dtype: string
splits:
- name: train
num_bytes: 187730970
num_examples: 157712
download_size: 95089013
dataset_size: 187730970
---
# Dataset Card for "llava_instruct_150k"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
loubnabnl/gpt4-1k-annotations | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: completion
dtype: string
- name: eval_prompt_header
dtype: string
- name: generation_config
struct:
- name: do_sample
dtype: bool
- name: temperature
dtype: float64
- name: top_p
dtype: float64
- name: metadata
struct:
- name: timestamp
dtype: string
- name: prompt
dtype: string
- name: review_model
dtype: string
- name: score
dtype: float64
- name: code_file
dtype: string
- name: size
dtype: int64
splits:
- name: train
num_bytes: 7384714
num_examples: 1000
download_size: 2350749
dataset_size: 7384714
---
# Dataset Card for "gpt4-1k-annotations"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
DZN222/lucas | ---
license: openrail
---
|
TristanPermentier/some_chives_ones | ---
dataset_info:
features:
- name: pixel_values
dtype: image
- name: label
dtype: image
splits:
- name: train
num_bytes: 21662024.0
num_examples: 29
download_size: 21484795
dataset_size: 21662024.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "some_chives_ones"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
heliosprime/twitter_dataset_1712986344 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 8285
num_examples: 18
download_size: 8029
dataset_size: 8285
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "twitter_dataset_1712986344"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
AI-C/rvc-models | ---
title: Genshin Impact RVC Models (combined)
emoji: 🎤
colorFrom: purple
colorTo: red
sdk: gradio
sdk_version: 3.36.1
app_file: app.py
pinned: false
license: mit
---
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
sazirarrwth99/repo_A | ---
dataset_info:
features:
- name: 'Unnamed: 0'
dtype: int64
- name: lex
dtype: string
- name: category
dtype: string
- name: size
dtype: int64
- name: eid
dtype: int64
- name: original_triple_sets
dtype: string
- name: modified_triple_sets
dtype: string
- name: shape
dtype: string
- name: shape_type
dtype: string
- name: metadata
dtype: string
- name: test_category
dtype: string
- name: dbpedia_links
dtype: string
- name: links
dtype: string
splits:
- name: train
num_bytes: 2978451
num_examples: 3293
download_size: 1057306
dataset_size: 2978451
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "repo_A"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
OUTEIRAL2/VOZIA3 | ---
license: openrail
---
|
ayeshgk/java_bug_ctx_fix_small_4 | ---
dataset_info:
features:
- name: id
dtype: int64
- name: bug_ctx
dtype: string
- name: fixed
dtype: string
splits:
- name: train
num_bytes: 59106
num_examples: 120
- name: validation
num_bytes: 16505
num_examples: 36
- name: test
num_bytes: 2022
num_examples: 4
download_size: 25058
dataset_size: 77633
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
open-llm-leaderboard/details_LeoLM__leo-hessianai-7b | ---
pretty_name: Evaluation run of LeoLM/leo-hessianai-7b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [LeoLM/leo-hessianai-7b](https://huggingface.co/LeoLM/leo-hessianai-7b) on the\
\ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_LeoLM__leo-hessianai-7b\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-10-25T10:03:23.884304](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-7b/blob/main/results_2023-10-25T10-03-23.884304.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0009437919463087249,\n\
\ \"em_stderr\": 0.0003144653119413205,\n \"f1\": 0.056075922818791854,\n\
\ \"f1_stderr\": 0.0013232326016856207,\n \"acc\": 0.38874610827245293,\n\
\ \"acc_stderr\": 0.009469282540407879\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.0009437919463087249,\n \"em_stderr\": 0.0003144653119413205,\n\
\ \"f1\": 0.056075922818791854,\n \"f1_stderr\": 0.0013232326016856207\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.056103108415466264,\n \
\ \"acc_stderr\": 0.006338668431321877\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7213891081294396,\n \"acc_stderr\": 0.01259989664949388\n\
\ }\n}\n```"
repo_url: https://huggingface.co/LeoLM/leo-hessianai-7b
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|arc:challenge|25_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_10_25T10_03_23.884304
path:
- '**/details_harness|drop|3_2023-10-25T10-03-23.884304.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-10-25T10-03-23.884304.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_10_25T10_03_23.884304
path:
- '**/details_harness|gsm8k|5_2023-10-25T10-03-23.884304.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-10-25T10-03-23.884304.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hellaswag|10_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-08T17-16-14.181420.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-08T17-16-14.181420.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_10_25T10_03_23.884304
path:
- '**/details_harness|winogrande|5_2023-10-25T10-03-23.884304.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-10-25T10-03-23.884304.parquet'
- config_name: results
data_files:
- split: 2023_10_08T17_16_14.181420
path:
- results_2023-10-08T17-16-14.181420.parquet
- split: 2023_10_25T10_03_23.884304
path:
- results_2023-10-25T10-03-23.884304.parquet
- split: latest
path:
- results_2023-10-25T10-03-23.884304.parquet
---
# Dataset Card for Evaluation run of LeoLM/leo-hessianai-7b
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/LeoLM/leo-hessianai-7b
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [LeoLM/leo-hessianai-7b](https://huggingface.co/LeoLM/leo-hessianai-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_LeoLM__leo-hessianai-7b",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-10-25T10:03:23.884304](https://huggingface.co/datasets/open-llm-leaderboard/details_LeoLM__leo-hessianai-7b/blob/main/results_2023-10-25T10-03-23.884304.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.0009437919463087249,
"em_stderr": 0.0003144653119413205,
"f1": 0.056075922818791854,
"f1_stderr": 0.0013232326016856207,
"acc": 0.38874610827245293,
"acc_stderr": 0.009469282540407879
},
"harness|drop|3": {
"em": 0.0009437919463087249,
"em_stderr": 0.0003144653119413205,
"f1": 0.056075922818791854,
"f1_stderr": 0.0013232326016856207
},
"harness|gsm8k|5": {
"acc": 0.056103108415466264,
"acc_stderr": 0.006338668431321877
},
"harness|winogrande|5": {
"acc": 0.7213891081294396,
"acc_stderr": 0.01259989664949388
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
liuyanchen1015/MULTI_VALUE_wnli_who_at | ---
dataset_info:
features:
- name: sentence1
dtype: string
- name: sentence2
dtype: string
- name: label
dtype: int64
- name: idx
dtype: int64
- name: value_score
dtype: int64
splits:
- name: dev
num_bytes: 1153
num_examples: 5
- name: test
num_bytes: 4996
num_examples: 14
- name: train
num_bytes: 6964
num_examples: 24
download_size: 14664
dataset_size: 13113
---
# Dataset Card for "MULTI_VALUE_wnli_who_at"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B | ---
pretty_name: Evaluation run of PulsarAI/Chat-AYB-Nova-13B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [PulsarAI/Chat-AYB-Nova-13B](https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-10-27T20:18:17.450635](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B/blob/main/results_2023-10-27T20-18-17.450635.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0041946308724832215,\n\
\ \"em_stderr\": 0.0006618716168266419,\n \"f1\": 0.0802946728187919,\n\
\ \"f1_stderr\": 0.0016873252068220475,\n \"acc\": 0.44971346473405205,\n\
\ \"acc_stderr\": 0.010392725523775513\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.0041946308724832215,\n \"em_stderr\": 0.0006618716168266419,\n\
\ \"f1\": 0.0802946728187919,\n \"f1_stderr\": 0.0016873252068220475\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.12357846853677028,\n \
\ \"acc_stderr\": 0.009065050306776921\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7758484609313339,\n \"acc_stderr\": 0.011720400740774104\n\
\ }\n}\n```"
repo_url: https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|arc:challenge|25_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_10_27T20_18_17.450635
path:
- '**/details_harness|drop|3_2023-10-27T20-18-17.450635.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-10-27T20-18-17.450635.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_10_27T20_18_17.450635
path:
- '**/details_harness|gsm8k|5_2023-10-27T20-18-17.450635.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-10-27T20-18-17.450635.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hellaswag|10_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-08T14-44-32.660445.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-08T14-44-32.660445.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_10_27T20_18_17.450635
path:
- '**/details_harness|winogrande|5_2023-10-27T20-18-17.450635.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-10-27T20-18-17.450635.parquet'
- config_name: results
data_files:
- split: 2023_10_08T14_44_32.660445
path:
- results_2023-10-08T14-44-32.660445.parquet
- split: 2023_10_27T20_18_17.450635
path:
- results_2023-10-27T20-18-17.450635.parquet
- split: latest
path:
- results_2023-10-27T20-18-17.450635.parquet
---
# Dataset Card for Evaluation run of PulsarAI/Chat-AYB-Nova-13B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [PulsarAI/Chat-AYB-Nova-13B](https://huggingface.co/PulsarAI/Chat-AYB-Nova-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-10-27T20:18:17.450635](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__Chat-AYB-Nova-13B/blob/main/results_2023-10-27T20-18-17.450635.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.0041946308724832215,
"em_stderr": 0.0006618716168266419,
"f1": 0.0802946728187919,
"f1_stderr": 0.0016873252068220475,
"acc": 0.44971346473405205,
"acc_stderr": 0.010392725523775513
},
"harness|drop|3": {
"em": 0.0041946308724832215,
"em_stderr": 0.0006618716168266419,
"f1": 0.0802946728187919,
"f1_stderr": 0.0016873252068220475
},
"harness|gsm8k|5": {
"acc": 0.12357846853677028,
"acc_stderr": 0.009065050306776921
},
"harness|winogrande|5": {
"acc": 0.7758484609313339,
"acc_stderr": 0.011720400740774104
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B | ---
pretty_name: Evaluation run of xzuyn/LLaMa-2-PeanutButter_v14-7B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [xzuyn/LLaMa-2-PeanutButter_v14-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 61 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B\"\
,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\
\nThese are the [latest results from run 2023-08-31T13:28:42.641649](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B/blob/main/results_2023-08-31T13%3A28%3A42.641649.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.46314650559918413,\n\
\ \"acc_stderr\": 0.0353597619312551,\n \"acc_norm\": 0.4669718546477287,\n\
\ \"acc_norm_stderr\": 0.03534376319528717,\n \"mc1\": 0.27906976744186046,\n\
\ \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.44677492914800465,\n\
\ \"mc2_stderr\": 0.015984529713376692\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5051194539249146,\n \"acc_stderr\": 0.014610624890309157,\n\
\ \"acc_norm\": 0.5418088737201365,\n \"acc_norm_stderr\": 0.014560220308714697\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6148177653853814,\n\
\ \"acc_stderr\": 0.004856437955719853,\n \"acc_norm\": 0.803823939454292,\n\
\ \"acc_norm_stderr\": 0.003962917115206181\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621502,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621502\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n\
\ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n\
\ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.4407894736842105,\n \"acc_stderr\": 0.040403110624904356,\n\
\ \"acc_norm\": 0.4407894736842105,\n \"acc_norm_stderr\": 0.040403110624904356\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.48,\n\
\ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \
\ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.4867924528301887,\n \"acc_stderr\": 0.030762134874500476,\n\
\ \"acc_norm\": 0.4867924528301887,\n \"acc_norm_stderr\": 0.030762134874500476\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4861111111111111,\n\
\ \"acc_stderr\": 0.04179596617581002,\n \"acc_norm\": 0.4861111111111111,\n\
\ \"acc_norm_stderr\": 0.04179596617581002\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n\
\ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.41040462427745666,\n\
\ \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.41040462427745666,\n\
\ \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179963,\n\
\ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179963\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n\
\ \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\
\ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n\
\ \"acc_stderr\": 0.04404556157374767,\n \"acc_norm\": 0.32456140350877194,\n\
\ \"acc_norm_stderr\": 0.04404556157374767\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.4413793103448276,\n \"acc_stderr\": 0.04137931034482758,\n\
\ \"acc_norm\": 0.4413793103448276,\n \"acc_norm_stderr\": 0.04137931034482758\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3148148148148148,\n \"acc_stderr\": 0.02391998416404773,\n \"\
acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02391998416404773\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n\
\ \"acc_stderr\": 0.03809523809523811,\n \"acc_norm\": 0.23809523809523808,\n\
\ \"acc_norm_stderr\": 0.03809523809523811\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4935483870967742,\n\
\ \"acc_stderr\": 0.02844163823354051,\n \"acc_norm\": 0.4935483870967742,\n\
\ \"acc_norm_stderr\": 0.02844163823354051\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.03430462416103872,\n\
\ \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.03430462416103872\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\"\
: 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.5393939393939394,\n \"acc_stderr\": 0.03892207016552013,\n\
\ \"acc_norm\": 0.5393939393939394,\n \"acc_norm_stderr\": 0.03892207016552013\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.5303030303030303,\n \"acc_stderr\": 0.03555804051763929,\n \"\
acc_norm\": 0.5303030303030303,\n \"acc_norm_stderr\": 0.03555804051763929\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.6528497409326425,\n \"acc_stderr\": 0.03435696168361355,\n\
\ \"acc_norm\": 0.6528497409326425,\n \"acc_norm_stderr\": 0.03435696168361355\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.43333333333333335,\n \"acc_stderr\": 0.025124653525885124,\n\
\ \"acc_norm\": 0.43333333333333335,\n \"acc_norm_stderr\": 0.025124653525885124\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.29259259259259257,\n \"acc_stderr\": 0.027738969632176088,\n \
\ \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.027738969632176088\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.4327731092436975,\n \"acc_stderr\": 0.03218358107742613,\n \
\ \"acc_norm\": 0.4327731092436975,\n \"acc_norm_stderr\": 0.03218358107742613\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.304635761589404,\n \"acc_stderr\": 0.03757949922943342,\n \"acc_norm\"\
: 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943342\n },\n\
\ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.6238532110091743,\n\
\ \"acc_stderr\": 0.020769231968205085,\n \"acc_norm\": 0.6238532110091743,\n\
\ \"acc_norm_stderr\": 0.020769231968205085\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\
: {\n \"acc\": 0.27314814814814814,\n \"acc_stderr\": 0.030388051301678116,\n\
\ \"acc_norm\": 0.27314814814814814,\n \"acc_norm_stderr\": 0.030388051301678116\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.5686274509803921,\n \"acc_stderr\": 0.03476099060501636,\n \"\
acc_norm\": 0.5686274509803921,\n \"acc_norm_stderr\": 0.03476099060501636\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.5907172995780591,\n \"acc_stderr\": 0.032007041833595914,\n \
\ \"acc_norm\": 0.5907172995780591,\n \"acc_norm_stderr\": 0.032007041833595914\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.48878923766816146,\n\
\ \"acc_stderr\": 0.033549366530984746,\n \"acc_norm\": 0.48878923766816146,\n\
\ \"acc_norm_stderr\": 0.033549366530984746\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.5114503816793893,\n \"acc_stderr\": 0.043841400240780176,\n\
\ \"acc_norm\": 0.5114503816793893,\n \"acc_norm_stderr\": 0.043841400240780176\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.5950413223140496,\n \"acc_stderr\": 0.04481137755942469,\n \"\
acc_norm\": 0.5950413223140496,\n \"acc_norm_stderr\": 0.04481137755942469\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.4722222222222222,\n\
\ \"acc_stderr\": 0.04826217294139894,\n \"acc_norm\": 0.4722222222222222,\n\
\ \"acc_norm_stderr\": 0.04826217294139894\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.5030674846625767,\n \"acc_stderr\": 0.03928297078179663,\n\
\ \"acc_norm\": 0.5030674846625767,\n \"acc_norm_stderr\": 0.03928297078179663\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\
\ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \
\ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.5145631067961165,\n \"acc_stderr\": 0.049486373240266356,\n\
\ \"acc_norm\": 0.5145631067961165,\n \"acc_norm_stderr\": 0.049486373240266356\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6752136752136753,\n\
\ \"acc_stderr\": 0.03067902276549883,\n \"acc_norm\": 0.6752136752136753,\n\
\ \"acc_norm_stderr\": 0.03067902276549883\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.52,\n \"acc_stderr\": 0.05021167315686779,\n \
\ \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05021167315686779\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6372924648786717,\n\
\ \"acc_stderr\": 0.017192708674602302,\n \"acc_norm\": 0.6372924648786717,\n\
\ \"acc_norm_stderr\": 0.017192708674602302\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.4797687861271676,\n \"acc_stderr\": 0.026897049996382868,\n\
\ \"acc_norm\": 0.4797687861271676,\n \"acc_norm_stderr\": 0.026897049996382868\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30837988826815643,\n\
\ \"acc_stderr\": 0.015445716910998877,\n \"acc_norm\": 0.30837988826815643,\n\
\ \"acc_norm_stderr\": 0.015445716910998877\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.5130718954248366,\n \"acc_stderr\": 0.028620130800700246,\n\
\ \"acc_norm\": 0.5130718954248366,\n \"acc_norm_stderr\": 0.028620130800700246\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5305466237942122,\n\
\ \"acc_stderr\": 0.028345045864840622,\n \"acc_norm\": 0.5305466237942122,\n\
\ \"acc_norm_stderr\": 0.028345045864840622\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.5061728395061729,\n \"acc_stderr\": 0.027818623962583295,\n\
\ \"acc_norm\": 0.5061728395061729,\n \"acc_norm_stderr\": 0.027818623962583295\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.36879432624113473,\n \"acc_stderr\": 0.028782227561347247,\n \
\ \"acc_norm\": 0.36879432624113473,\n \"acc_norm_stderr\": 0.028782227561347247\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.35071707953063885,\n\
\ \"acc_stderr\": 0.01218777337074152,\n \"acc_norm\": 0.35071707953063885,\n\
\ \"acc_norm_stderr\": 0.01218777337074152\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5036764705882353,\n \"acc_stderr\": 0.030372015885428188,\n\
\ \"acc_norm\": 0.5036764705882353,\n \"acc_norm_stderr\": 0.030372015885428188\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.43300653594771243,\n \"acc_stderr\": 0.02004544247332423,\n \
\ \"acc_norm\": 0.43300653594771243,\n \"acc_norm_stderr\": 0.02004544247332423\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5363636363636364,\n\
\ \"acc_stderr\": 0.04776449162396197,\n \"acc_norm\": 0.5363636363636364,\n\
\ \"acc_norm_stderr\": 0.04776449162396197\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.49795918367346936,\n \"acc_stderr\": 0.0320089533497105,\n\
\ \"acc_norm\": 0.49795918367346936,\n \"acc_norm_stderr\": 0.0320089533497105\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5970149253731343,\n\
\ \"acc_stderr\": 0.034683432951111266,\n \"acc_norm\": 0.5970149253731343,\n\
\ \"acc_norm_stderr\": 0.034683432951111266\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \
\ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n\
\ \"acc_stderr\": 0.038099730845402184,\n \"acc_norm\": 0.39759036144578314,\n\
\ \"acc_norm_stderr\": 0.038099730845402184\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.6842105263157895,\n \"acc_stderr\": 0.03565079670708312,\n\
\ \"acc_norm\": 0.6842105263157895,\n \"acc_norm_stderr\": 0.03565079670708312\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.27906976744186046,\n\
\ \"mc1_stderr\": 0.0157021070906279,\n \"mc2\": 0.44677492914800465,\n\
\ \"mc2_stderr\": 0.015984529713376692\n }\n}\n```"
repo_url: https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|arc:challenge|25_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hellaswag|10_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-08-31T13:28:42.641649.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- '**/details_harness|truthfulqa:mc|0_2023-08-31T13:28:42.641649.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-08-31T13:28:42.641649.parquet'
- config_name: results
data_files:
- split: 2023_08_31T13_28_42.641649
path:
- results_2023-08-31T13:28:42.641649.parquet
- split: latest
path:
- results_2023-08-31T13:28:42.641649.parquet
---
# Dataset Card for Evaluation run of xzuyn/LLaMa-2-PeanutButter_v14-7B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [xzuyn/LLaMa-2-PeanutButter_v14-7B](https://huggingface.co/xzuyn/LLaMa-2-PeanutButter_v14-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B",
	"harness_truthfulqa_mc_0",
	split="latest")
```
## Latest results
These are the [latest results from run 2023-08-31T13:28:42.641649](https://huggingface.co/datasets/open-llm-leaderboard/details_xzuyn__LLaMa-2-PeanutButter_v14-7B/blob/main/results_2023-08-31T13%3A28%3A42.641649.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.46314650559918413,
"acc_stderr": 0.0353597619312551,
"acc_norm": 0.4669718546477287,
"acc_norm_stderr": 0.03534376319528717,
"mc1": 0.27906976744186046,
"mc1_stderr": 0.0157021070906279,
"mc2": 0.44677492914800465,
"mc2_stderr": 0.015984529713376692
},
"harness|arc:challenge|25": {
"acc": 0.5051194539249146,
"acc_stderr": 0.014610624890309157,
"acc_norm": 0.5418088737201365,
"acc_norm_stderr": 0.014560220308714697
},
"harness|hellaswag|10": {
"acc": 0.6148177653853814,
"acc_stderr": 0.004856437955719853,
"acc_norm": 0.803823939454292,
"acc_norm_stderr": 0.003962917115206181
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621502,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621502
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4962962962962963,
"acc_stderr": 0.04319223625811331,
"acc_norm": 0.4962962962962963,
"acc_norm_stderr": 0.04319223625811331
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.4407894736842105,
"acc_stderr": 0.040403110624904356,
"acc_norm": 0.4407894736842105,
"acc_norm_stderr": 0.040403110624904356
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.4867924528301887,
"acc_stderr": 0.030762134874500476,
"acc_norm": 0.4867924528301887,
"acc_norm_stderr": 0.030762134874500476
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.4861111111111111,
"acc_stderr": 0.04179596617581002,
"acc_norm": 0.4861111111111111,
"acc_norm_stderr": 0.04179596617581002
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.41040462427745666,
"acc_stderr": 0.03750757044895537,
"acc_norm": 0.41040462427745666,
"acc_norm_stderr": 0.03750757044895537
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.03950581861179963,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.03950581861179963
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956913
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.425531914893617,
"acc_stderr": 0.03232146916224468,
"acc_norm": 0.425531914893617,
"acc_norm_stderr": 0.03232146916224468
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.32456140350877194,
"acc_stderr": 0.04404556157374767,
"acc_norm": 0.32456140350877194,
"acc_norm_stderr": 0.04404556157374767
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3148148148148148,
"acc_stderr": 0.02391998416404773,
"acc_norm": 0.3148148148148148,
"acc_norm_stderr": 0.02391998416404773
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.03809523809523811,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.03809523809523811
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.4935483870967742,
"acc_stderr": 0.02844163823354051,
"acc_norm": 0.4935483870967742,
"acc_norm_stderr": 0.02844163823354051
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.3891625615763547,
"acc_stderr": 0.03430462416103872,
"acc_norm": 0.3891625615763547,
"acc_norm_stderr": 0.03430462416103872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.45,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.45,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.5393939393939394,
"acc_stderr": 0.03892207016552013,
"acc_norm": 0.5393939393939394,
"acc_norm_stderr": 0.03892207016552013
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.5303030303030303,
"acc_stderr": 0.03555804051763929,
"acc_norm": 0.5303030303030303,
"acc_norm_stderr": 0.03555804051763929
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.6528497409326425,
"acc_stderr": 0.03435696168361355,
"acc_norm": 0.6528497409326425,
"acc_norm_stderr": 0.03435696168361355
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.43333333333333335,
"acc_stderr": 0.025124653525885124,
"acc_norm": 0.43333333333333335,
"acc_norm_stderr": 0.025124653525885124
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.29259259259259257,
"acc_stderr": 0.027738969632176088,
"acc_norm": 0.29259259259259257,
"acc_norm_stderr": 0.027738969632176088
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.4327731092436975,
"acc_stderr": 0.03218358107742613,
"acc_norm": 0.4327731092436975,
"acc_norm_stderr": 0.03218358107742613
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943342,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943342
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.6238532110091743,
"acc_stderr": 0.020769231968205085,
"acc_norm": 0.6238532110091743,
"acc_norm_stderr": 0.020769231968205085
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.27314814814814814,
"acc_stderr": 0.030388051301678116,
"acc_norm": 0.27314814814814814,
"acc_norm_stderr": 0.030388051301678116
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.5686274509803921,
"acc_stderr": 0.03476099060501636,
"acc_norm": 0.5686274509803921,
"acc_norm_stderr": 0.03476099060501636
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.5907172995780591,
"acc_stderr": 0.032007041833595914,
"acc_norm": 0.5907172995780591,
"acc_norm_stderr": 0.032007041833595914
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.48878923766816146,
"acc_stderr": 0.033549366530984746,
"acc_norm": 0.48878923766816146,
"acc_norm_stderr": 0.033549366530984746
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5114503816793893,
"acc_stderr": 0.043841400240780176,
"acc_norm": 0.5114503816793893,
"acc_norm_stderr": 0.043841400240780176
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.5950413223140496,
"acc_stderr": 0.04481137755942469,
"acc_norm": 0.5950413223140496,
"acc_norm_stderr": 0.04481137755942469
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.04826217294139894,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.04826217294139894
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5030674846625767,
"acc_stderr": 0.03928297078179663,
"acc_norm": 0.5030674846625767,
"acc_norm_stderr": 0.03928297078179663
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.375,
"acc_stderr": 0.04595091388086298,
"acc_norm": 0.375,
"acc_norm_stderr": 0.04595091388086298
},
"harness|hendrycksTest-management|5": {
"acc": 0.5145631067961165,
"acc_stderr": 0.049486373240266356,
"acc_norm": 0.5145631067961165,
"acc_norm_stderr": 0.049486373240266356
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.6752136752136753,
"acc_stderr": 0.03067902276549883,
"acc_norm": 0.6752136752136753,
"acc_norm_stderr": 0.03067902276549883
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.52,
"acc_stderr": 0.05021167315686779,
"acc_norm": 0.52,
"acc_norm_stderr": 0.05021167315686779
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.6372924648786717,
"acc_stderr": 0.017192708674602302,
"acc_norm": 0.6372924648786717,
"acc_norm_stderr": 0.017192708674602302
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.4797687861271676,
"acc_stderr": 0.026897049996382868,
"acc_norm": 0.4797687861271676,
"acc_norm_stderr": 0.026897049996382868
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.30837988826815643,
"acc_stderr": 0.015445716910998877,
"acc_norm": 0.30837988826815643,
"acc_norm_stderr": 0.015445716910998877
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5130718954248366,
"acc_stderr": 0.028620130800700246,
"acc_norm": 0.5130718954248366,
"acc_norm_stderr": 0.028620130800700246
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.5305466237942122,
"acc_stderr": 0.028345045864840622,
"acc_norm": 0.5305466237942122,
"acc_norm_stderr": 0.028345045864840622
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.5061728395061729,
"acc_stderr": 0.027818623962583295,
"acc_norm": 0.5061728395061729,
"acc_norm_stderr": 0.027818623962583295
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.36879432624113473,
"acc_stderr": 0.028782227561347247,
"acc_norm": 0.36879432624113473,
"acc_norm_stderr": 0.028782227561347247
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.35071707953063885,
"acc_stderr": 0.01218777337074152,
"acc_norm": 0.35071707953063885,
"acc_norm_stderr": 0.01218777337074152
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5036764705882353,
"acc_stderr": 0.030372015885428188,
"acc_norm": 0.5036764705882353,
"acc_norm_stderr": 0.030372015885428188
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.43300653594771243,
"acc_stderr": 0.02004544247332423,
"acc_norm": 0.43300653594771243,
"acc_norm_stderr": 0.02004544247332423
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.5363636363636364,
"acc_stderr": 0.04776449162396197,
"acc_norm": 0.5363636363636364,
"acc_norm_stderr": 0.04776449162396197
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.49795918367346936,
"acc_stderr": 0.0320089533497105,
"acc_norm": 0.49795918367346936,
"acc_norm_stderr": 0.0320089533497105
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.5970149253731343,
"acc_stderr": 0.034683432951111266,
"acc_norm": 0.5970149253731343,
"acc_norm_stderr": 0.034683432951111266
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-virology|5": {
"acc": 0.39759036144578314,
"acc_stderr": 0.038099730845402184,
"acc_norm": 0.39759036144578314,
"acc_norm_stderr": 0.038099730845402184
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.6842105263157895,
"acc_stderr": 0.03565079670708312,
"acc_norm": 0.6842105263157895,
"acc_norm_stderr": 0.03565079670708312
},
"harness|truthfulqa:mc|0": {
"mc1": 0.27906976744186046,
"mc1_stderr": 0.0157021070906279,
"mc2": 0.44677492914800465,
"mc2_stderr": 0.015984529713376692
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
adilhabibi/bioacoustic_segment_img | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': Bare-throated Whistler
'1': Common Paradise Kingfisher
'2': Dusky Megapode
'3': Green-backed Kingfisher
'4': Ivory-breasted Pitta
'5': Red Bird-of-paradise
'6': Rusty-breasted Cuckoo
'7': Standardwing
'8': Sulawesi Bush Warbler
'9': Sulawesi Scops Owl
'10': Wilson's Bird-of-paradise
splits:
- name: train
num_bytes: 8842324.329
num_examples: 1457
download_size: 8825727
dataset_size: 8842324.329
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Laurie/bytedance_rag_eval | ---
license: mit
---
|
communityai/HuggingFaceH4___deita-10k-v0-sft | ---
dataset_info:
features:
- name: source
dtype: string
- name: conversations
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 345005293.0
num_examples: 9500
download_size: 136782991
dataset_size: 345005293.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Aniemore/resd_annotated_multi | ---
dataset_info:
features:
- name: name
dtype: string
- name: path
dtype: string
- name: speech
sequence: float64
- name: text
dtype: string
- name: emotion
dtype: string
- name: text_emotions
sequence: string
splits:
- name: train
num_bytes: 866818029
num_examples: 1116
- name: test
num_bytes: 212496333
num_examples: 280
download_size: 615228382
dataset_size: 1079314362
---
# Dataset Card for "resd_annotated_multi"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
coding-assistant-custom/dummy-dataset-sayak | ---
dataset_info:
features:
- name: reponame
dtype: string
- name: filepath
dtype: string
- name: content
dtype: string
splits:
- name: train
num_bytes: 79032
num_examples: 18
download_size: 27713
dataset_size: 79032
---
# Dataset Card for "dummy-dataset-sayak"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
malaysia-ai/Multiturn-JPJ-Test-Prep | ---
license: apache-2.0
---
Multi-turn conversation generated using Mistral-V4 on JPJ-Test-Prep questions. |
juancopi81/binomial_3blue1brown_test | ---
dataset_info:
features:
- name: CHANNEL_NAME
dtype: string
- name: URL
dtype: string
- name: TITLE
dtype: string
- name: DESCRIPTION
dtype: string
- name: TRANSCRIPTION
dtype: string
- name: SEGMENTS
dtype: string
splits:
- name: train
num_bytes: 59462
num_examples: 2
download_size: 44700
dataset_size: 59462
---
# Dataset Card for "binomial_3blue1brown_test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
HoangHa/SMILES-250M | ---
dataset_info:
features:
- name: SMILES
dtype: string
splits:
- name: train
num_bytes: 1626837623.1854794
num_examples: 84345972
download_size: 1799608655
dataset_size: 1626837623.1854794
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "SMILES-250M"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Seanxh/twitter_dataset_1713199616 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 102146
num_examples: 238
download_size: 40787
dataset_size: 102146
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
GETALP/FLUE_WSD | ---
license: lgpl
multilinguality:
- monolingual
language:
- fr
task_categories:
- other
task_ids:
- word-sense-disambiguation
dataset_info:
features:
- name: document_id
dtype: string
- name: sentence
dtype: string
- name: sentence_label
dtype: string
- name: sentence_first_label
dtype: string
- name: surface_forms
sequence: string
- name: labels
sequence: string
- name: first_labels
sequence: string
- name: word_id
sequence: string
- name: scores
sequence: string
- name: lemmas
sequence: string
- name: pos
sequence: string
splits:
- name: SemCor
num_bytes: 71632913
num_examples: 37176
- name: SemEval
num_bytes: 749832
num_examples: 306
- name: WNGT
num_bytes: 206691837
num_examples: 117659
download_size: 41831981
dataset_size: 279074582
---
# Word Sense Disambiguation for FLUE
## Dataset Description
- **Homepage:**
- **Repository:**
- **https://arxiv.org/pdf/1905.05677.pdf**
- **Leaderboard:**
- **loic.vial@univ-grenoble-alpes.fr**
### Dataset Summary
This dataset is split into 3 sub-datasets: FrenchSemEval-Task12, French WNGT and an automatic translation of SemCor.
### Supported Tasks and Leaderboards
Word Sense Disambiguation for French.
### Language
French
### Licensing Information
```
GNU Lesser General Public License
```
### Citation Information
```bibtex
@inproceedings{vial-etal-2019-sense,
title = "Sense Vocabulary Compression through the Semantic Knowledge of {W}ord{N}et for Neural Word Sense Disambiguation",
author = {Vial, Lo{\"\i}c and
Lecouteux, Benjamin and
Schwab, Didier},
booktitle = "Proceedings of the 10th Global Wordnet Conference",
month = jul,
year = "2019",
address = "Wroclaw, Poland",
publisher = "Global Wordnet Association",
url = "https://aclanthology.org/2019.gwc-1.14",
pages = "108--117",
abstract = "In this article, we tackle the issue of the limited quantity of manually sense annotated corpora for the task of word sense disambiguation, by exploiting the semantic relationships between senses such as synonymy, hypernymy and hyponymy, in order to compress the sense vocabulary of Princeton WordNet, and thus reduce the number of different sense tags that must be observed to disambiguate all words of the lexical database. We propose two different methods that greatly reduce the size of neural WSD models, with the benefit of improving their coverage without additional training data, and without impacting their precision. In addition to our methods, we present a WSD system which relies on pre-trained BERT word vectors in order to achieve results that significantly outperforms the state of the art on all WSD evaluation tasks.",
}
```
### Contributions
* loic.vial@univ-grenoble-alpes.fr
* benjamin.lecouteux@univ-grenoble-alpes.fr
* didier.schwab@univ-grenoble-alpes.fr |
xingkunliuxtracta/processed_demo | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: id
dtype: string
- name: package_name
dtype: string
- name: review
dtype: string
- name: date
dtype: string
- name: star
dtype: int64
- name: version_id
dtype: int64
splits:
- name: train
num_bytes: 1508
num_examples: 5
- name: test
num_bytes: 956
num_examples: 5
download_size: 9453
dataset_size: 2464
---
# Dataset Card for "processed_demo"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
navinaananthan/Kurdish-Sorani-Parallel-Corpus | ---
language:
- ku
--- |
fivetech/tao | ---
license: mit
---
|
joey234/mmlu-high_school_microeconomics-neg | ---
dataset_info:
features:
- name: choices
sequence: string
- name: answer
dtype:
class_label:
names:
'0': A
'1': B
'2': C
'3': D
- name: question
dtype: string
splits:
- name: test
num_bytes: 71310
num_examples: 238
download_size: 36920
dataset_size: 71310
---
# Dataset Card for "mmlu-high_school_microeconomics-neg"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
nluai/ZaloAI_NouResearch | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: output
dtype: string
splits:
- name: validation
num_bytes: 261148
num_examples: 687
download_size: 131765
dataset_size: 261148
configs:
- config_name: default
data_files:
- split: validation
path: data/validation-*
---
|
mgane/2D_Video_Game_Cartoon_Character_Sprite-Sheets | ---
task_categories:
- text-to-image
- image-classification
- image-to-image
language:
- en
tags:
- art
- video games
size_categories:
- n<1K
---
# Dataset Card for Dataset Name
## Dataset Details
Experimental composition of 76 cartoon art-style video game character spritesheets. Resized to 512x512, mixed variation of animation styles.
### Dataset Description
All images were edited using the Tiled image-editing software, as most assets are typically downloaded individually and not in sequence. I compiled each animation sequence into one image to display animations frame-by-frame, evenly distributed across some common animations seen in 2D video game art (Idle, Attack, Walk, Running, etc.). I used this same image set for some experimental tests on Stable Diffusion XL via LoRA and Dreambooth training methods, with solid results post-training.
- **Curated by:** [m-gane]
### Disclaimer
None of these characters were from my original making, but a compilation from open-source 2D video game asset sites from various artists. For more information regarding source assets please check out: https://itch.io/game-assets/tag-2d and https://opengameart.org/. |
indiejoseph/wikipedia-zh-yue-summaries | ---
dataset_info:
features:
- name: summary
dtype: string
- name: title
dtype: string
- name: category
dtype: string
- name: content
dtype: string
splits:
- name: train
num_bytes: 9555531
num_examples: 7109
download_size: 6795226
dataset_size: 9555531
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "wikipedia-zh-yue-summaries"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
AnonymousPaperSubmissions/Testing_all_variables | ---
license: mit
---
|
tyzhu/squad_qa_title_v5_full_recite_full_passage_random_permute_rerun_8 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: id
dtype: string
- name: title
dtype: string
- name: context
dtype: string
- name: question
dtype: string
- name: answers
sequence:
- name: text
dtype: string
- name: answer_start
dtype: int32
- name: answer
dtype: string
- name: context_id
dtype: string
- name: inputs
dtype: string
- name: targets
dtype: string
splits:
- name: train
num_bytes: 11290731.700778643
num_examples: 6305
- name: validation
num_bytes: 590772
num_examples: 300
download_size: 1824305
dataset_size: 11881503.700778643
---
# Dataset Card for "squad_qa_title_v5_full_recite_full_passage_random_permute_rerun_8"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
lmqg/qa_squadshifts_synthetic | ---
license: cc-by-4.0
pretty_name: Synthetic QA dataset on SQuADShifts.
language: en
multilinguality: monolingual
size_categories: 10K<n<100K
source_datasets:
- extended|wikipedia
task_categories:
- question-answering
task_ids:
- extractive-qa
---
# Dataset Card for "lmqg/qa_squadshifts_synthetic"
## Dataset Description
- **Repository:** [https://github.com/asahi417/lm-question-generation](https://github.com/asahi417/lm-question-generation)
- **Paper:** [https://arxiv.org/abs/2210.03992](https://arxiv.org/abs/2210.03992)
- **Point of Contact:** [Asahi Ushio](http://asahiushio.com/)
### Dataset Summary
This is a synthetic QA dataset generated with fine-tuned QG models over [`lmqg/qa_squadshifts`](https://huggingface.co/datasets/lmqg/qa_squadshifts), made for question-answering based evaluation (QAE) for question generation model proposed by [Zhang and Bansal, 2019](https://aclanthology.org/D19-1253/).
The test split is the original validation set of [`lmqg/qa_squadshifts`](https://huggingface.co/datasets/lmqg/qa_squadshifts), where the model should be evaluate on.
### Supported Tasks and Leaderboards
* `question-answering`
### Languages
English (en)
## Dataset Structure
### Data Fields
The data fields are the same among all splits.
#### plain_text
- `id`: a `string` feature of id
- `title`: a `string` feature of title of the paragraph
- `context`: a `string` feature of paragraph
- `question`: a `string` feature of question
- `answers`: a `json` feature of answers
### Data Splits
TBA
## Citation Information
```
@inproceedings{ushio-etal-2022-generative,
title = "{G}enerative {L}anguage {M}odels for {P}aragraph-{L}evel {Q}uestion {G}eneration",
author = "Ushio, Asahi and
Alva-Manchego, Fernando and
Camacho-Collados, Jose",
booktitle = "Proceedings of the 2022 Conference on Empirical Methods in Natural Language Processing",
month = dec,
year = "2022",
address = "Abu Dhabi, U.A.E.",
publisher = "Association for Computational Linguistics",
}
``` |
mrseeker87/bittensor_qa | ---
license: cc-by-sa-4.0
task_categories:
- question-answering
language:
- en
size_categories:
- 1K<n<10K
---
Dataset created from bittensor's subnet1. Will be constantly updated as I add more Q/A.
Dataset is currently in "raw" format, would love to have something prettier for loading into datasets. |
moizsajid/dreambooth-markhor | ---
title: Markhor images from iStock
tags:
- keras-dreambooth
- nature
- diffusers
- text-to-image
--- |
open-llm-leaderboard/details_BFauber__lora_llama2-13b_10e5_r128_a64 | ---
pretty_name: Evaluation run of BFauber/lora_llama2-13b_10e5_r128_a64
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [BFauber/lora_llama2-13b_10e5_r128_a64](https://huggingface.co/BFauber/lora_llama2-13b_10e5_r128_a64)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BFauber__lora_llama2-13b_10e5_r128_a64\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-10T01:12:24.322159](https://huggingface.co/datasets/open-llm-leaderboard/details_BFauber__lora_llama2-13b_10e5_r128_a64/blob/main/results_2024-02-10T01-12-24.322159.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5547782267581419,\n\
\ \"acc_stderr\": 0.03364758987723506,\n \"acc_norm\": 0.5605657594957815,\n\
\ \"acc_norm_stderr\": 0.034369652344960734,\n \"mc1\": 0.2582619339045288,\n\
\ \"mc1_stderr\": 0.0153218216884762,\n \"mc2\": 0.37196197837880757,\n\
\ \"mc2_stderr\": 0.013661684990590431\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5639931740614335,\n \"acc_stderr\": 0.014491225699230916,\n\
\ \"acc_norm\": 0.590443686006826,\n \"acc_norm_stderr\": 0.014370358632472434\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6166102370045807,\n\
\ \"acc_stderr\": 0.004852182621274261,\n \"acc_norm\": 0.8227444732125074,\n\
\ \"acc_norm_stderr\": 0.00381104341202466\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5037037037037037,\n\
\ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.5037037037037037,\n\
\ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249033,\n\
\ \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249033\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n\
\ \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \
\ \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6415094339622641,\n \"acc_stderr\": 0.029514703583981765,\n\
\ \"acc_norm\": 0.6415094339622641,\n \"acc_norm_stderr\": 0.029514703583981765\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.625,\n\
\ \"acc_stderr\": 0.04048439222695598,\n \"acc_norm\": 0.625,\n \
\ \"acc_norm_stderr\": 0.04048439222695598\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n\
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5606936416184971,\n\
\ \"acc_stderr\": 0.037842719328874674,\n \"acc_norm\": 0.5606936416184971,\n\
\ \"acc_norm_stderr\": 0.037842719328874674\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.043364327079931785,\n\
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.043364327079931785\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n\
\ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.46382978723404256,\n \"acc_stderr\": 0.032600385118357715,\n\
\ \"acc_norm\": 0.46382978723404256,\n \"acc_norm_stderr\": 0.032600385118357715\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n\
\ \"acc_stderr\": 0.04303684033537314,\n \"acc_norm\": 0.2982456140350877,\n\
\ \"acc_norm_stderr\": 0.04303684033537314\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n\
\ \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.31216931216931215,\n \"acc_stderr\": 0.023865206836972606,\n \"\
acc_norm\": 0.31216931216931215,\n \"acc_norm_stderr\": 0.023865206836972606\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.31746031746031744,\n\
\ \"acc_stderr\": 0.04163453031302859,\n \"acc_norm\": 0.31746031746031744,\n\
\ \"acc_norm_stderr\": 0.04163453031302859\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.6709677419354839,\n \"acc_stderr\": 0.026729499068349958,\n \"\
acc_norm\": 0.6709677419354839,\n \"acc_norm_stderr\": 0.026729499068349958\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"\
acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\
: 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.6484848484848484,\n \"acc_stderr\": 0.037282069986826503,\n\
\ \"acc_norm\": 0.6484848484848484,\n \"acc_norm_stderr\": 0.037282069986826503\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.6818181818181818,\n \"acc_stderr\": 0.0331847733384533,\n \"acc_norm\"\
: 0.6818181818181818,\n \"acc_norm_stderr\": 0.0331847733384533\n },\n\
\ \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \
\ \"acc\": 0.7927461139896373,\n \"acc_stderr\": 0.02925282329180363,\n\
\ \"acc_norm\": 0.7927461139896373,\n \"acc_norm_stderr\": 0.02925282329180363\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.5256410256410257,\n \"acc_stderr\": 0.02531764972644866,\n \
\ \"acc_norm\": 0.5256410256410257,\n \"acc_norm_stderr\": 0.02531764972644866\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028604,\n \
\ \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028604\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.5462184873949579,\n \"acc_stderr\": 0.03233943468182088,\n \
\ \"acc_norm\": 0.5462184873949579,\n \"acc_norm_stderr\": 0.03233943468182088\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"\
acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7431192660550459,\n \"acc_stderr\": 0.01873249292834246,\n \"\
acc_norm\": 0.7431192660550459,\n \"acc_norm_stderr\": 0.01873249292834246\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.47685185185185186,\n \"acc_stderr\": 0.03406315360711507,\n \"\
acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.03406315360711507\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7352941176470589,\n \"acc_stderr\": 0.030964517926923403,\n \"\
acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.030964517926923403\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7130801687763713,\n \"acc_stderr\": 0.02944377302259469,\n \
\ \"acc_norm\": 0.7130801687763713,\n \"acc_norm_stderr\": 0.02944377302259469\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n\
\ \"acc_stderr\": 0.032100621541349864,\n \"acc_norm\": 0.6457399103139013,\n\
\ \"acc_norm_stderr\": 0.032100621541349864\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\
\ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591207,\n \"\
acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591207\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n\
\ \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n\
\ \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n\
\ \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n\
\ \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n\
\ \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\
\ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7991452991452992,\n\
\ \"acc_stderr\": 0.026246772946890474,\n \"acc_norm\": 0.7991452991452992,\n\
\ \"acc_norm_stderr\": 0.026246772946890474\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.55,\n \"acc_stderr\": 0.04999999999999999,\n \
\ \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.04999999999999999\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7458492975734355,\n\
\ \"acc_stderr\": 0.015569254692045757,\n \"acc_norm\": 0.7458492975734355,\n\
\ \"acc_norm_stderr\": 0.015569254692045757\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6213872832369942,\n \"acc_stderr\": 0.026113749361310345,\n\
\ \"acc_norm\": 0.6213872832369942,\n \"acc_norm_stderr\": 0.026113749361310345\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27039106145251396,\n\
\ \"acc_stderr\": 0.014854993938010066,\n \"acc_norm\": 0.27039106145251396,\n\
\ \"acc_norm_stderr\": 0.014854993938010066\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n\
\ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n\
\ \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n\
\ \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100793,\n\
\ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100793\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.41134751773049644,\n \"acc_stderr\": 0.029354911159940985,\n \
\ \"acc_norm\": 0.41134751773049644,\n \"acc_norm_stderr\": 0.029354911159940985\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41916558018252936,\n\
\ \"acc_stderr\": 0.012602244505788233,\n \"acc_norm\": 0.41916558018252936,\n\
\ \"acc_norm_stderr\": 0.012602244505788233\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.03033257809455502,\n\
\ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.03033257809455502\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.5522875816993464,\n \"acc_stderr\": 0.020116925347422425,\n \
\ \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.020116925347422425\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\
\ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \
\ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6244897959183674,\n \"acc_stderr\": 0.03100120903989484,\n\
\ \"acc_norm\": 0.6244897959183674,\n \"acc_norm_stderr\": 0.03100120903989484\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7263681592039801,\n\
\ \"acc_stderr\": 0.031524391865554016,\n \"acc_norm\": 0.7263681592039801,\n\
\ \"acc_norm_stderr\": 0.031524391865554016\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036625,\n \
\ \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036625\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n\
\ \"acc_stderr\": 0.038743715565879536,\n \"acc_norm\": 0.45180722891566266,\n\
\ \"acc_norm_stderr\": 0.038743715565879536\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n\
\ \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2582619339045288,\n\
\ \"mc1_stderr\": 0.0153218216884762,\n \"mc2\": 0.37196197837880757,\n\
\ \"mc2_stderr\": 0.013661684990590431\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7671665351223362,\n \"acc_stderr\": 0.011878201073856544\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.23351023502653526,\n \
\ \"acc_stderr\": 0.011653286808791043\n }\n}\n```"
repo_url: https://huggingface.co/BFauber/lora_llama2-13b_10e5_r128_a64
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|arc:challenge|25_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|gsm8k|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hellaswag|10_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-10T01-12-24.322159.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-10T01-12-24.322159.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- '**/details_harness|winogrande|5_2024-02-10T01-12-24.322159.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-10T01-12-24.322159.parquet'
- config_name: results
data_files:
- split: 2024_02_10T01_12_24.322159
path:
- results_2024-02-10T01-12-24.322159.parquet
- split: latest
path:
- results_2024-02-10T01-12-24.322159.parquet
---
# Dataset Card for Evaluation run of BFauber/lora_llama2-13b_10e5_r128_a64
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [BFauber/lora_llama2-13b_10e5_r128_a64](https://huggingface.co/BFauber/lora_llama2-13b_10e5_r128_a64) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_BFauber__lora_llama2-13b_10e5_r128_a64",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-02-10T01:12:24.322159](https://huggingface.co/datasets/open-llm-leaderboard/details_BFauber__lora_llama2-13b_10e5_r128_a64/blob/main/results_2024-02-10T01-12-24.322159.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5547782267581419,
"acc_stderr": 0.03364758987723506,
"acc_norm": 0.5605657594957815,
"acc_norm_stderr": 0.034369652344960734,
"mc1": 0.2582619339045288,
"mc1_stderr": 0.0153218216884762,
"mc2": 0.37196197837880757,
"mc2_stderr": 0.013661684990590431
},
"harness|arc:challenge|25": {
"acc": 0.5639931740614335,
"acc_stderr": 0.014491225699230916,
"acc_norm": 0.590443686006826,
"acc_norm_stderr": 0.014370358632472434
},
"harness|hellaswag|10": {
"acc": 0.6166102370045807,
"acc_stderr": 0.004852182621274261,
"acc_norm": 0.8227444732125074,
"acc_norm_stderr": 0.00381104341202466
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5037037037037037,
"acc_stderr": 0.04319223625811331,
"acc_norm": 0.5037037037037037,
"acc_norm_stderr": 0.04319223625811331
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.5394736842105263,
"acc_stderr": 0.04056242252249033,
"acc_norm": 0.5394736842105263,
"acc_norm_stderr": 0.04056242252249033
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956913,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956913
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6415094339622641,
"acc_stderr": 0.029514703583981765,
"acc_norm": 0.6415094339622641,
"acc_norm_stderr": 0.029514703583981765
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.625,
"acc_stderr": 0.04048439222695598,
"acc_norm": 0.625,
"acc_norm_stderr": 0.04048439222695598
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5606936416184971,
"acc_stderr": 0.037842719328874674,
"acc_norm": 0.5606936416184971,
"acc_norm_stderr": 0.037842719328874674
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.043364327079931785,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.043364327079931785
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.46382978723404256,
"acc_stderr": 0.032600385118357715,
"acc_norm": 0.46382978723404256,
"acc_norm_stderr": 0.032600385118357715
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2982456140350877,
"acc_stderr": 0.04303684033537314,
"acc_norm": 0.2982456140350877,
"acc_norm_stderr": 0.04303684033537314
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5310344827586206,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.31216931216931215,
"acc_stderr": 0.023865206836972606,
"acc_norm": 0.31216931216931215,
"acc_norm_stderr": 0.023865206836972606
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.31746031746031744,
"acc_stderr": 0.04163453031302859,
"acc_norm": 0.31746031746031744,
"acc_norm_stderr": 0.04163453031302859
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6709677419354839,
"acc_stderr": 0.026729499068349958,
"acc_norm": 0.6709677419354839,
"acc_norm_stderr": 0.026729499068349958
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.45320197044334976,
"acc_stderr": 0.03502544650845872,
"acc_norm": 0.45320197044334976,
"acc_norm_stderr": 0.03502544650845872
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.6484848484848484,
"acc_stderr": 0.037282069986826503,
"acc_norm": 0.6484848484848484,
"acc_norm_stderr": 0.037282069986826503
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6818181818181818,
"acc_stderr": 0.0331847733384533,
"acc_norm": 0.6818181818181818,
"acc_norm_stderr": 0.0331847733384533
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.7927461139896373,
"acc_stderr": 0.02925282329180363,
"acc_norm": 0.7927461139896373,
"acc_norm_stderr": 0.02925282329180363
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5256410256410257,
"acc_stderr": 0.02531764972644866,
"acc_norm": 0.5256410256410257,
"acc_norm_stderr": 0.02531764972644866
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028604,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028604
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5462184873949579,
"acc_stderr": 0.03233943468182088,
"acc_norm": 0.5462184873949579,
"acc_norm_stderr": 0.03233943468182088
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.038796870240733264,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.038796870240733264
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7431192660550459,
"acc_stderr": 0.01873249292834246,
"acc_norm": 0.7431192660550459,
"acc_norm_stderr": 0.01873249292834246
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.47685185185185186,
"acc_stderr": 0.03406315360711507,
"acc_norm": 0.47685185185185186,
"acc_norm_stderr": 0.03406315360711507
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7352941176470589,
"acc_stderr": 0.030964517926923403,
"acc_norm": 0.7352941176470589,
"acc_norm_stderr": 0.030964517926923403
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7130801687763713,
"acc_stderr": 0.02944377302259469,
"acc_norm": 0.7130801687763713,
"acc_norm_stderr": 0.02944377302259469
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6457399103139013,
"acc_stderr": 0.032100621541349864,
"acc_norm": 0.6457399103139013,
"acc_norm_stderr": 0.032100621541349864
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6259541984732825,
"acc_stderr": 0.042438692422305246,
"acc_norm": 0.6259541984732825,
"acc_norm_stderr": 0.042438692422305246
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7355371900826446,
"acc_stderr": 0.04026187527591207,
"acc_norm": 0.7355371900826446,
"acc_norm_stderr": 0.04026187527591207
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7314814814814815,
"acc_stderr": 0.042844679680521934,
"acc_norm": 0.7314814814814815,
"acc_norm_stderr": 0.042844679680521934
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6809815950920245,
"acc_stderr": 0.03661997551073836,
"acc_norm": 0.6809815950920245,
"acc_norm_stderr": 0.03661997551073836
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.30357142857142855,
"acc_stderr": 0.04364226155841044,
"acc_norm": 0.30357142857142855,
"acc_norm_stderr": 0.04364226155841044
},
"harness|hendrycksTest-management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.04185832598928315
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.7991452991452992,
"acc_stderr": 0.026246772946890474,
"acc_norm": 0.7991452991452992,
"acc_norm_stderr": 0.026246772946890474
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.55,
"acc_stderr": 0.04999999999999999,
"acc_norm": 0.55,
"acc_norm_stderr": 0.04999999999999999
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7458492975734355,
"acc_stderr": 0.015569254692045757,
"acc_norm": 0.7458492975734355,
"acc_norm_stderr": 0.015569254692045757
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6213872832369942,
"acc_stderr": 0.026113749361310345,
"acc_norm": 0.6213872832369942,
"acc_norm_stderr": 0.026113749361310345
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.27039106145251396,
"acc_stderr": 0.014854993938010066,
"acc_norm": 0.27039106145251396,
"acc_norm_stderr": 0.014854993938010066
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6405228758169934,
"acc_stderr": 0.027475969910660952,
"acc_norm": 0.6405228758169934,
"acc_norm_stderr": 0.027475969910660952
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6527331189710611,
"acc_stderr": 0.027040745502307336,
"acc_norm": 0.6527331189710611,
"acc_norm_stderr": 0.027040745502307336
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.026725868809100793,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.026725868809100793
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.41134751773049644,
"acc_stderr": 0.029354911159940985,
"acc_norm": 0.41134751773049644,
"acc_norm_stderr": 0.029354911159940985
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.41916558018252936,
"acc_stderr": 0.012602244505788233,
"acc_norm": 0.41916558018252936,
"acc_norm_stderr": 0.012602244505788233
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5257352941176471,
"acc_stderr": 0.03033257809455502,
"acc_norm": 0.5257352941176471,
"acc_norm_stderr": 0.03033257809455502
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5522875816993464,
"acc_stderr": 0.020116925347422425,
"acc_norm": 0.5522875816993464,
"acc_norm_stderr": 0.020116925347422425
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6,
"acc_stderr": 0.0469237132203465,
"acc_norm": 0.6,
"acc_norm_stderr": 0.0469237132203465
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6244897959183674,
"acc_stderr": 0.03100120903989484,
"acc_norm": 0.6244897959183674,
"acc_norm_stderr": 0.03100120903989484
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7263681592039801,
"acc_stderr": 0.031524391865554016,
"acc_norm": 0.7263681592039801,
"acc_norm_stderr": 0.031524391865554016
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.81,
"acc_stderr": 0.03942772444036625,
"acc_norm": 0.81,
"acc_norm_stderr": 0.03942772444036625
},
"harness|hendrycksTest-virology|5": {
"acc": 0.45180722891566266,
"acc_stderr": 0.038743715565879536,
"acc_norm": 0.45180722891566266,
"acc_norm_stderr": 0.038743715565879536
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.03188578017686398,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.03188578017686398
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2582619339045288,
"mc1_stderr": 0.0153218216884762,
"mc2": 0.37196197837880757,
"mc2_stderr": 0.013661684990590431
},
"harness|winogrande|5": {
"acc": 0.7671665351223362,
"acc_stderr": 0.011878201073856544
},
"harness|gsm8k|5": {
"acc": 0.23351023502653526,
"acc_stderr": 0.011653286808791043
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
cboard-org/arasaac | ---
license: mit
---
|
gunz0007/indo | ---
license: apache-2.0
---
|
Hack90/ncbi_genbank_part_41 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: id
dtype: string
- name: sequence
dtype: string
- name: name
dtype: string
- name: description
dtype: string
- name: features
dtype: int64
- name: seq_length
dtype: int64
splits:
- name: train
num_bytes: 31049128200
num_examples: 273326
download_size: 13996445609
dataset_size: 31049128200
---
# Dataset Card for "ncbi_genbank_part_41"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
TPM-28/emotion-FR | ---
license: apache-2.0
task_categories:
- text-classification
language:
- fr
tags:
- multi-class-classification
size_categories:
- 10K<n<100K
--- |
M-A-D/ArabicDarija-xP3x | ---
dataset_info:
features:
- name: text
dtype: string
- name: language
dtype: string
- name: template
dtype: string
- name: dataset
dtype: string
splits:
- name: train
num_bytes: 308250831
num_examples: 1223481
download_size: 129951272
dataset_size: 308250831
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "ArabicDarija-xP3x" part of "xP3x" by [Muennighoff](https://huggingface.co/Muennighoff)
## Find below part of the original dataset card
## Dataset Description
- **Repository:** https://github.com/bigscience-workshop/xmtf
- **Paper:** [Crosslingual Generalization through Multitask Finetuning](https://arxiv.org/abs/2211.01786)
- **Point of Contact:** [Niklas Muennighoff](mailto:n.muennighoff@gmail.com)
### Dataset Summary
> xP3x (Crosslingual Public Pool of Prompts eXtended) is a collection of prompts & datasets across 277 languages & 16 NLP tasks. It contains all of xP3 + much more! It is used for training future contenders of mT0 & BLOOMZ at project Aya @[C4AI](https://cohere.for.ai/) 🧡
>
- **Creation:** The dataset can be recreated using instructions available [here](https://github.com/bigscience-workshop/xmtf#create-xp3) together with the file in this repository named `xp3x_create.py`. We provide this version to save processing time.
- **Languages:** 277
- **xP3 Dataset Family:**
<table>
<tr>
<th>Name</th>
<th>Explanation</th>
<th>Example models</th>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/Muennighoff/xP3x>xP3x</a></t>
<td>Mixture of 17 tasks in 277 languages with English prompts</td>
<td>WIP - Join us at Project Aya @<a href=https://cohere.for.ai/>C4AI</a> to help!</td>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/bigscience/xP3>xP3</a></t>
<td>Mixture of 13 training tasks in 46 languages with English prompts</td>
<td><a href=https://huggingface.co/bigscience/bloomz>bloomz</a> & <a href=https://huggingface.co/bigscience/mt0-xxl>mt0-xxl</a></td>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/bigscience/xP3mt>xP3mt</a></t>
<td>Mixture of 13 training tasks in 46 languages with prompts in 20 languages (machine-translated from English)</td>
<td><a href=https://huggingface.co/bigscience/bloomz-mt>bloomz-mt</a> & <a href=https://huggingface.co/bigscience/mt0-xxl-mt>mt0-xxl-mt</a></td>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/bigscience/xP3all>xP3all</a></t>
<td>xP3 + evaluation datasets adding an additional 3 tasks for a total of 16 tasks in 46 languages with English prompts</td>
<td></td>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/bigscience/xP3megds>xP3megds</a></t>
<td><a href=https://github.com/bigscience-workshop/Megatron-DeepSpeed>Megatron-DeepSpeed</a> processed version of xP3</td>
<td><a href=https://huggingface.co/bigscience/bloomz>bloomz</a></td>
</tr>
<tr>
<td><a href=https://huggingface.co/datasets/Muennighoff/P3>P3</a></t>
<td>Repreprocessed version of the English-only <a href=https://huggingface.co/datasets/bigscience/P3>P3</a> with 8 training tasks</td>
<td><a href=https://huggingface.co/bigscience/bloomz-p3>bloomz-p3</a> & <a href=https://huggingface.co/bigscience/mt0-xxl-p3>mt0-xxl-p3</a></td>
</tr>
</table>
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
vikenkd/mini-python_code_instructions_research | ---
license: mit
dataset_info:
features:
- name: Instruction
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 2766589.5
num_examples: 750
- name: test
num_bytes: 922196.5
num_examples: 250
download_size: 1612042
dataset_size: 3688786.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
freshpearYoon/v3_val_free_3 | ---
dataset_info:
features:
- name: input_features
sequence:
sequence: float32
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 15366825976
num_examples: 10000
download_size: 2259862247
dataset_size: 15366825976
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
smithclay/nortonstories | ---
license: apache-2.0
dataset_info:
features:
- name: text
dtype: string
- name: year
dtype: string
- name: intelligibility
dtype: float64
splits:
- name: train
num_bytes: 1122541753.1327753
num_examples: 780578
download_size: 801736082
dataset_size: 1122541753.1327753
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
DTU54DL/common-native | ---
annotations_creators:
- expert-generated
language:
- en
language_creators:
- found
license:
- mit
multilinguality:
- monolingual
paperswithcode_id: acronym-identification
pretty_name: Acronym Identification Dataset
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- token-classification
task_ids:
- token-classification-other-acronym-identification
train-eval-index:
- col_mapping:
labels: tags
tokens: tokens
config: default
splits:
eval_split: test
task: token-classification
task_id: entity_extraction
dataset_info:
features:
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: sentence
dtype: string
- name: accent
dtype: string
splits:
- name: train
num_bytes: 419902426.3910719
num_examples: 10000
- name: test
num_bytes: 41430604.33704293
num_examples: 994
download_size: 440738761
dataset_size: 461333030.72811484
---
# Dataset Card for [Dataset Name]
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:**
- **Repository:**
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
[More Information Needed]
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
Thanks to [@github-username](https://github.com/<github-username>) for adding this dataset. |
xed_en_fi | ---
annotations_creators:
- expert-generated
language_creators:
- found
language:
- en
- fi
license:
- cc-by-4.0
multilinguality:
- multilingual
size_categories:
- 10K<n<100K
- 1K<n<10K
source_datasets:
- extended|other-OpenSubtitles2016
task_categories:
- text-classification
task_ids:
- intent-classification
- multi-class-classification
- multi-label-classification
- sentiment-classification
paperswithcode_id: xed
pretty_name: XedEnglishFinnish
dataset_info:
- config_name: en_annotated
features:
- name: sentence
dtype: string
- name: labels
sequence:
class_label:
names:
'0': neutral
'1': anger
'2': anticipation
'3': disgust
'4': fear
'5': joy
'6': sadness
'7': surprise
'8': trust
splits:
- name: train
num_bytes: 1018485
num_examples: 17528
download_size: 2421235
dataset_size: 1018485
- config_name: en_neutral
features:
- name: sentence
dtype: string
- name: labels
dtype:
class_label:
names:
'0': neutral
'1': anger
'2': anticipation
'3': disgust
'4': fear
'5': joy
'6': sadness
'7': surprise
'8': trust
splits:
- name: train
num_bytes: 401129
num_examples: 9675
download_size: 2421235
dataset_size: 401129
- config_name: fi_annotated
features:
- name: sentence
dtype: string
- name: labels
sequence:
class_label:
names:
'0': neutral
'1': anger
'2': anticipation
'3': disgust
'4': fear
'5': joy
'6': sadness
'7': surprise
'8': trust
splits:
- name: train
num_bytes: 756224
num_examples: 14449
download_size: 2421235
dataset_size: 756224
- config_name: fi_neutral
features:
- name: sentence
dtype: string
- name: labels
dtype:
class_label:
names:
'0': neutral
'1': anger
'2': anticipation
'3': disgust
'4': fear
'5': joy
'6': sadness
'7': surprise
'8': trust
splits:
- name: train
num_bytes: 427499
num_examples: 10794
download_size: 2421235
dataset_size: 427499
config_names:
- en_annotated
- en_neutral
- fi_annotated
- fi_neutral
---
# Dataset Card for xed_english_finnish
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:**
- **Repository:** [Github](https://github.com/Helsinki-NLP/XED)
- **Paper:** [Arxiv](https://arxiv.org/abs/2011.01612)
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
This is the XED dataset. The dataset consists of emotion annotated movie subtitles from OPUS. We use Plutchik's 8 core emotions to annotate. The data is multilabel. The original annotations have been sourced for mainly English and Finnish.
For the English data we used Stanford NER (named entity recognition) (Finkel et al., 2005) to replace names and locations with the tags: [PERSON] and [LOCATION] respectively.
For the Finnish data, we replaced names and locations using the Turku NER corpus (Luoma et al., 2020).
### Supported Tasks and Leaderboards
Sentiment Classification, Multilabel Classification, Multiclass Classification, Intent Classification
### Languages
English, Finnish
## Dataset Structure
### Data Instances
```
{ "sentence": "A confession that you hired [PERSON] ... and are responsible for my father's murder."
"labels": [1, 6] # anger, sadness
}
```
### Data Fields
- sentence: a line from the dataset
- labels: labels corresponding to the emotion as an integer
Where the number indicates the emotion in ascending alphabetical order: anger:1, anticipation:2, disgust:3, fear:4, joy:5, sadness:6, surprise:7, trust:8, with neutral:0 where applicable.
### Data Splits
For English:
Number of unique data points: 17528 ('en_annotated' config) + 9675 ('en_neutral' config)
Number of emotions: 8 (+neutral)
For Finnish:
Number of unique data points: 14449 ('fi_annotated' config) + 10794 ('fi_neutral' config)
Number of emotions: 8 (+neutral)
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
License: Creative Commons Attribution 4.0 International License (CC-BY)
### Citation Information
@inproceedings{ohman2020xed,
title={XED: A Multilingual Dataset for Sentiment Analysis and Emotion Detection},
author={{\"O}hman, Emily and P{\`a}mies, Marc and Kajava, Kaisla and Tiedemann, J{\"o}rg},
booktitle={The 28th International Conference on Computational Linguistics (COLING 2020)},
year={2020}
}
### Contributions
Thanks to [@lhoestq](https://github.com/lhoestq), [@harshalmittal4](https://github.com/harshalmittal4) for adding this dataset. |
open-llm-leaderboard/details_JunchengXie__zephyr-7b-beta-gpt-4-80k | ---
pretty_name: Evaluation run of JunchengXie/zephyr-7b-beta-gpt-4-80k
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JunchengXie/zephyr-7b-beta-gpt-4-80k](https://huggingface.co/JunchengXie/zephyr-7b-beta-gpt-4-80k)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JunchengXie__zephyr-7b-beta-gpt-4-80k\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-27T23:53:43.783934](https://huggingface.co/datasets/open-llm-leaderboard/details_JunchengXie__zephyr-7b-beta-gpt-4-80k/blob/main/results_2024-03-27T23-53-43.783934.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.602574862684413,\n\
\ \"acc_stderr\": 0.03340581138036985,\n \"acc_norm\": 0.6098452767259759,\n\
\ \"acc_norm_stderr\": 0.0341057144449039,\n \"mc1\": 0.4186046511627907,\n\
\ \"mc1_stderr\": 0.01727001528447685,\n \"mc2\": 0.584043219666108,\n\
\ \"mc2_stderr\": 0.015523976744619418\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5802047781569966,\n \"acc_stderr\": 0.014422181226303026,\n\
\ \"acc_norm\": 0.6083617747440273,\n \"acc_norm_stderr\": 0.014264122124938213\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5879306910973909,\n\
\ \"acc_stderr\": 0.004912015369160072,\n \"acc_norm\": 0.7907787293367855,\n\
\ \"acc_norm_stderr\": 0.004059213774735556\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n\
\ \"acc_stderr\": 0.04218506215368881,\n \"acc_norm\": 0.6074074074074074,\n\
\ \"acc_norm_stderr\": 0.04218506215368881\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.03910525752849724,\n\
\ \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.03910525752849724\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n\
\ \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \
\ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n\
\ \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7152777777777778,\n\
\ \"acc_stderr\": 0.037738099906869334,\n \"acc_norm\": 0.7152777777777778,\n\
\ \"acc_norm_stderr\": 0.037738099906869334\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n\
\ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \
\ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n\
\ \"acc_stderr\": 0.03714325906302065,\n \"acc_norm\": 0.6127167630057804,\n\
\ \"acc_norm_stderr\": 0.03714325906302065\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n\
\ \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.72,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.72,\n\
\ \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5191489361702127,\n \"acc_stderr\": 0.03266204299064678,\n\
\ \"acc_norm\": 0.5191489361702127,\n \"acc_norm_stderr\": 0.03266204299064678\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n\
\ \"acc_stderr\": 0.04657047260594962,\n \"acc_norm\": 0.4298245614035088,\n\
\ \"acc_norm_stderr\": 0.04657047260594962\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.38095238095238093,\n \"acc_stderr\": 0.0250107491161376,\n \"\
acc_norm\": 0.38095238095238093,\n \"acc_norm_stderr\": 0.0250107491161376\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.373015873015873,\n\
\ \"acc_stderr\": 0.04325506042017086,\n \"acc_norm\": 0.373015873015873,\n\
\ \"acc_norm_stderr\": 0.04325506042017086\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n\
\ \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n\
\ \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n\
\ \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\"\
: 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n\
\ \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"\
acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8341968911917098,\n \"acc_stderr\": 0.026839845022314415,\n\
\ \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6,\n \"acc_stderr\": 0.024838811988033165,\n \"acc_norm\"\
: 0.6,\n \"acc_norm_stderr\": 0.024838811988033165\n },\n \"harness|hendrycksTest-high_school_mathematics|5\"\
: {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083004,\n\
\ \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083004\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.02971914287634286,\n \
\ \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.02971914287634286\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\
acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8055045871559633,\n \"acc_stderr\": 0.016970289090458033,\n \"\
acc_norm\": 0.8055045871559633,\n \"acc_norm_stderr\": 0.016970289090458033\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5601851851851852,\n \"acc_stderr\": 0.0338517797604481,\n \"acc_norm\"\
: 0.5601851851851852,\n \"acc_norm_stderr\": 0.0338517797604481\n },\n\
\ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7549019607843137,\n\
\ \"acc_stderr\": 0.030190282453501943,\n \"acc_norm\": 0.7549019607843137,\n\
\ \"acc_norm_stderr\": 0.030190282453501943\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.7046413502109705,\n \"acc_stderr\": 0.02969633871342288,\n\
\ \"acc_norm\": 0.7046413502109705,\n \"acc_norm_stderr\": 0.02969633871342288\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n\
\ \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n\
\ \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n\
\ \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\"\
: 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n\
\ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \
\ \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \
\ \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n\
\ \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\
\ \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n\
\ \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\
\ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n\
\ \"acc_stderr\": 0.02514093595033543,\n \"acc_norm\": 0.8205128205128205,\n\
\ \"acc_norm_stderr\": 0.02514093595033543\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.014866821664709583,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.014866821664709583\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.02488314057007176,\n\
\ \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.02488314057007176\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36201117318435755,\n\
\ \"acc_stderr\": 0.016073067350153087,\n \"acc_norm\": 0.36201117318435755,\n\
\ \"acc_norm_stderr\": 0.016073067350153087\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.02656892101545715,\n\
\ \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.02656892101545715\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6752411575562701,\n\
\ \"acc_stderr\": 0.026596782287697043,\n \"acc_norm\": 0.6752411575562701,\n\
\ \"acc_norm_stderr\": 0.026596782287697043\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6728395061728395,\n \"acc_stderr\": 0.026105673861409825,\n\
\ \"acc_norm\": 0.6728395061728395,\n \"acc_norm_stderr\": 0.026105673861409825\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4326241134751773,\n \"acc_stderr\": 0.02955545423677885,\n \
\ \"acc_norm\": 0.4326241134751773,\n \"acc_norm_stderr\": 0.02955545423677885\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41264667535853977,\n\
\ \"acc_stderr\": 0.012573836633799016,\n \"acc_norm\": 0.41264667535853977,\n\
\ \"acc_norm_stderr\": 0.012573836633799016\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n\
\ \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6356209150326797,\n \"acc_stderr\": 0.019469518221573695,\n \
\ \"acc_norm\": 0.6356209150326797,\n \"acc_norm_stderr\": 0.019469518221573695\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\
\ \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n\
\ \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6530612244897959,\n \"acc_stderr\": 0.030472526026726496,\n\
\ \"acc_norm\": 0.6530612244897959,\n \"acc_norm_stderr\": 0.030472526026726496\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7910447761194029,\n\
\ \"acc_stderr\": 0.028748298931728655,\n \"acc_norm\": 0.7910447761194029,\n\
\ \"acc_norm_stderr\": 0.028748298931728655\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4939759036144578,\n\
\ \"acc_stderr\": 0.03892212195333045,\n \"acc_norm\": 0.4939759036144578,\n\
\ \"acc_norm_stderr\": 0.03892212195333045\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.030611116557432528,\n\
\ \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.030611116557432528\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4186046511627907,\n\
\ \"mc1_stderr\": 0.01727001528447685,\n \"mc2\": 0.584043219666108,\n\
\ \"mc2_stderr\": 0.015523976744619418\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7403314917127072,\n \"acc_stderr\": 0.012322700705552669\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.266868840030326,\n \
\ \"acc_stderr\": 0.012183780551887962\n }\n}\n```"
repo_url: https://huggingface.co/JunchengXie/zephyr-7b-beta-gpt-4-80k
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|arc:challenge|25_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|gsm8k|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hellaswag|10_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-27T23-53-43.783934.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-27T23-53-43.783934.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- '**/details_harness|winogrande|5_2024-03-27T23-53-43.783934.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-27T23-53-43.783934.parquet'
- config_name: results
data_files:
- split: 2024_03_27T23_53_43.783934
path:
- results_2024-03-27T23-53-43.783934.parquet
- split: latest
path:
- results_2024-03-27T23-53-43.783934.parquet
---
# Dataset Card for Evaluation run of JunchengXie/zephyr-7b-beta-gpt-4-80k
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [JunchengXie/zephyr-7b-beta-gpt-4-80k](https://huggingface.co/JunchengXie/zephyr-7b-beta-gpt-4-80k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_JunchengXie__zephyr-7b-beta-gpt-4-80k",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-03-27T23:53:43.783934](https://huggingface.co/datasets/open-llm-leaderboard/details_JunchengXie__zephyr-7b-beta-gpt-4-80k/blob/main/results_2024-03-27T23-53-43.783934.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.602574862684413,
"acc_stderr": 0.03340581138036985,
"acc_norm": 0.6098452767259759,
"acc_norm_stderr": 0.0341057144449039,
"mc1": 0.4186046511627907,
"mc1_stderr": 0.01727001528447685,
"mc2": 0.584043219666108,
"mc2_stderr": 0.015523976744619418
},
"harness|arc:challenge|25": {
"acc": 0.5802047781569966,
"acc_stderr": 0.014422181226303026,
"acc_norm": 0.6083617747440273,
"acc_norm_stderr": 0.014264122124938213
},
"harness|hellaswag|10": {
"acc": 0.5879306910973909,
"acc_stderr": 0.004912015369160072,
"acc_norm": 0.7907787293367855,
"acc_norm_stderr": 0.004059213774735556
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6074074074074074,
"acc_stderr": 0.04218506215368881,
"acc_norm": 0.6074074074074074,
"acc_norm_stderr": 0.04218506215368881
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6381578947368421,
"acc_stderr": 0.03910525752849724,
"acc_norm": 0.6381578947368421,
"acc_norm_stderr": 0.03910525752849724
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.51,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6792452830188679,
"acc_stderr": 0.028727502957880267,
"acc_norm": 0.6792452830188679,
"acc_norm_stderr": 0.028727502957880267
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7152777777777778,
"acc_stderr": 0.037738099906869334,
"acc_norm": 0.7152777777777778,
"acc_norm_stderr": 0.037738099906869334
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145633,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145633
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6127167630057804,
"acc_stderr": 0.03714325906302065,
"acc_norm": 0.6127167630057804,
"acc_norm_stderr": 0.03714325906302065
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.43137254901960786,
"acc_stderr": 0.04928099597287534,
"acc_norm": 0.43137254901960786,
"acc_norm_stderr": 0.04928099597287534
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.72,
"acc_stderr": 0.04512608598542129,
"acc_norm": 0.72,
"acc_norm_stderr": 0.04512608598542129
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5191489361702127,
"acc_stderr": 0.03266204299064678,
"acc_norm": 0.5191489361702127,
"acc_norm_stderr": 0.03266204299064678
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4298245614035088,
"acc_stderr": 0.04657047260594962,
"acc_norm": 0.4298245614035088,
"acc_norm_stderr": 0.04657047260594962
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.38095238095238093,
"acc_stderr": 0.0250107491161376,
"acc_norm": 0.38095238095238093,
"acc_norm_stderr": 0.0250107491161376
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.373015873015873,
"acc_stderr": 0.04325506042017086,
"acc_norm": 0.373015873015873,
"acc_norm_stderr": 0.04325506042017086
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7677419354838709,
"acc_stderr": 0.024022256130308235,
"acc_norm": 0.7677419354838709,
"acc_norm_stderr": 0.024022256130308235
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.47783251231527096,
"acc_stderr": 0.03514528562175008,
"acc_norm": 0.47783251231527096,
"acc_norm_stderr": 0.03514528562175008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7212121212121212,
"acc_stderr": 0.03501438706296781,
"acc_norm": 0.7212121212121212,
"acc_norm_stderr": 0.03501438706296781
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.03173071239071724,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.03173071239071724
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8341968911917098,
"acc_stderr": 0.026839845022314415,
"acc_norm": 0.8341968911917098,
"acc_norm_stderr": 0.026839845022314415
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6,
"acc_stderr": 0.024838811988033165,
"acc_norm": 0.6,
"acc_norm_stderr": 0.024838811988033165
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.35185185185185186,
"acc_stderr": 0.029116617606083004,
"acc_norm": 0.35185185185185186,
"acc_norm_stderr": 0.029116617606083004
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7016806722689075,
"acc_stderr": 0.02971914287634286,
"acc_norm": 0.7016806722689075,
"acc_norm_stderr": 0.02971914287634286
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.03861557546255169,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.03861557546255169
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8055045871559633,
"acc_stderr": 0.016970289090458033,
"acc_norm": 0.8055045871559633,
"acc_norm_stderr": 0.016970289090458033
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5601851851851852,
"acc_stderr": 0.0338517797604481,
"acc_norm": 0.5601851851851852,
"acc_norm_stderr": 0.0338517797604481
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7549019607843137,
"acc_stderr": 0.030190282453501943,
"acc_norm": 0.7549019607843137,
"acc_norm_stderr": 0.030190282453501943
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7046413502109705,
"acc_stderr": 0.02969633871342288,
"acc_norm": 0.7046413502109705,
"acc_norm_stderr": 0.02969633871342288
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6143497757847534,
"acc_stderr": 0.03266842214289201,
"acc_norm": 0.6143497757847534,
"acc_norm_stderr": 0.03266842214289201
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7175572519083969,
"acc_stderr": 0.03948406125768361,
"acc_norm": 0.7175572519083969,
"acc_norm_stderr": 0.03948406125768361
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.71900826446281,
"acc_stderr": 0.04103203830514512,
"acc_norm": 0.71900826446281,
"acc_norm_stderr": 0.04103203830514512
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.75,
"acc_stderr": 0.04186091791394607,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04186091791394607
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6809815950920245,
"acc_stderr": 0.03661997551073836,
"acc_norm": 0.6809815950920245,
"acc_norm_stderr": 0.03661997551073836
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.38392857142857145,
"acc_stderr": 0.04616143075028547,
"acc_norm": 0.38392857142857145,
"acc_norm_stderr": 0.04616143075028547
},
"harness|hendrycksTest-management|5": {
"acc": 0.7281553398058253,
"acc_stderr": 0.044052680241409216,
"acc_norm": 0.7281553398058253,
"acc_norm_stderr": 0.044052680241409216
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8205128205128205,
"acc_stderr": 0.02514093595033543,
"acc_norm": 0.8205128205128205,
"acc_norm_stderr": 0.02514093595033543
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.014866821664709583,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.014866821664709583
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6907514450867052,
"acc_stderr": 0.02488314057007176,
"acc_norm": 0.6907514450867052,
"acc_norm_stderr": 0.02488314057007176
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.36201117318435755,
"acc_stderr": 0.016073067350153087,
"acc_norm": 0.36201117318435755,
"acc_norm_stderr": 0.016073067350153087
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6862745098039216,
"acc_stderr": 0.02656892101545715,
"acc_norm": 0.6862745098039216,
"acc_norm_stderr": 0.02656892101545715
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6752411575562701,
"acc_stderr": 0.026596782287697043,
"acc_norm": 0.6752411575562701,
"acc_norm_stderr": 0.026596782287697043
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6728395061728395,
"acc_stderr": 0.026105673861409825,
"acc_norm": 0.6728395061728395,
"acc_norm_stderr": 0.026105673861409825
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4326241134751773,
"acc_stderr": 0.02955545423677885,
"acc_norm": 0.4326241134751773,
"acc_norm_stderr": 0.02955545423677885
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.41264667535853977,
"acc_stderr": 0.012573836633799016,
"acc_norm": 0.41264667535853977,
"acc_norm_stderr": 0.012573836633799016
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6691176470588235,
"acc_stderr": 0.02858270975389845,
"acc_norm": 0.6691176470588235,
"acc_norm_stderr": 0.02858270975389845
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6356209150326797,
"acc_stderr": 0.019469518221573695,
"acc_norm": 0.6356209150326797,
"acc_norm_stderr": 0.019469518221573695
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.046075820907199756,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.046075820907199756
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6530612244897959,
"acc_stderr": 0.030472526026726496,
"acc_norm": 0.6530612244897959,
"acc_norm_stderr": 0.030472526026726496
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.7910447761194029,
"acc_stderr": 0.028748298931728655,
"acc_norm": 0.7910447761194029,
"acc_norm_stderr": 0.028748298931728655
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4939759036144578,
"acc_stderr": 0.03892212195333045,
"acc_norm": 0.4939759036144578,
"acc_norm_stderr": 0.03892212195333045
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8011695906432749,
"acc_stderr": 0.030611116557432528,
"acc_norm": 0.8011695906432749,
"acc_norm_stderr": 0.030611116557432528
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4186046511627907,
"mc1_stderr": 0.01727001528447685,
"mc2": 0.584043219666108,
"mc2_stderr": 0.015523976744619418
},
"harness|winogrande|5": {
"acc": 0.7403314917127072,
"acc_stderr": 0.012322700705552669
},
"harness|gsm8k|5": {
"acc": 0.266868840030326,
"acc_stderr": 0.012183780551887962
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
Kimata/GPT_Driver_dataset | ---
dataset_info:
features:
- name: messages
list:
- name: role
dtype: string
- name: content
dtype: string
splits:
- name: train
num_bytes: 49269209
num_examples: 23388
- name: test
num_bytes: 10693369
num_examples: 5119
download_size: 7321936
dataset_size: 59962578
---
# Dataset Card for "GPT_Driver_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kghanlon/processed_SOTUs_parliament_texts | ---
dataset_info:
features:
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 3612825824
num_examples: 2155624
download_size: 1049504347
dataset_size: 3612825824
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
skvarre/hogskoleprovet-ord-3k | ---
dataset_info:
features:
- name: metadata
dtype: string
- name: text
sequence: string
splits:
- name: train
num_bytes: 262260
num_examples: 3002
download_size: 99081
dataset_size: 262260
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
RIW/small-coco-wm_1_3 | ---
dataset_info:
features:
- name: image
dtype: image
- name: caption
dtype: string
- name: url
dtype: string
- name: key
dtype: string
- name: status
dtype: string
- name: error_message
dtype: 'null'
- name: width
dtype: int64
- name: height
dtype: int64
- name: original_width
dtype: int64
- name: original_height
dtype: int64
- name: exif
dtype: string
- name: sha256
dtype: string
splits:
- name: train
num_bytes: 1886028576.233
num_examples: 9989
- name: validation
num_bytes: 1865114007.604
num_examples: 9993
download_size: 888826269
dataset_size: 3751142583.837
---
# Dataset Card for "small-coco-wm_1_3"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
warleagle/1t_chat_bot_data | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 890558
num_examples: 2083
download_size: 398939
dataset_size: 890558
---
# Dataset Card for "1t_chat_bot_data"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
tyzhu/find_last_sent_train_50_eval_10_baseline | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: inputs
dtype: string
- name: targets
dtype: string
- name: title
dtype: string
- name: context
dtype: string
splits:
- name: train
num_bytes: 80962
num_examples: 50
- name: validation
num_bytes: 15247
num_examples: 10
download_size: 0
dataset_size: 96209
---
# Dataset Card for "find_last_sent_train_50_eval_10_baseline"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ximdeew/hiho_audio_test | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 185373604.0
num_examples: 149
download_size: 174626524
dataset_size: 185373604.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
lihongze8/instruc_1M | ---
license: llama2
---
|
heliosprime/twitter_dataset_1713193258 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 16516
num_examples: 45
download_size: 16801
dataset_size: 16516
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "twitter_dataset_1713193258"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
UrbanSyn/UrbanSyn | ---
license: cc-by-sa-4.0
task_categories:
- object-detection
- image-segmentation
- depth-estimation
language:
- en
tags:
- Urban Scenario
- Autonomous Driving
- Synthetic data
size_categories:
- 1K<n<10K
pretty_name: UrbanSyn
---
<img src="front_urbanSyn.png" width=100% />
# UrbanSyn Dataset
UrbanSyn is an open synthetic dataset featuring photorealistic driving scenes. It contains ground-truth annotations for semantic segmentation, scene depth, panoptic instance segmentation, and 2-D bounding boxes. Website [https://urbansyn.org](https://urbansyn.org)
## Overview
UrbanSyn is a diverse, compact, and photorealistic dataset that provides more than 7.5k synthetic annotated images. It was born to address the synth-to-real domain gap, contributing to unprecedented synthetic-only baselines used by domain adaptation (DA) methods.
**- Reduce the synth-to-real domain gap**
UrbanSyn dataset helps to reduce the domain gap by contributing to unprecedented synthetic-only baselines used by domain adaptation (DA) methods.
**- Ground-truth annotations**
UrbanSyn comes with photorealistic color images, per-pixel semantic segmentation, depth, instance panoptic segmentation, and 2-D bounding boxes.
**- Open for research and commercial purposes**
UrbanSyn may be used for research and commercial purposes. It is released publicly under the Creative Commons Attribution-Commercial-ShareAlike 4.0 license.
**- High-degree of photorealism**
UrbanSyn features highly realistic and curated driving scenarios leveraging procedurally-generated content and high-quality curated assets. To achieve UrbanSyn photorealism we leverage industry-standard unbiased path-tracing and AI-based denoising techniques.
## White Paper
**[[Arxiv]](https://arxiv.org/abs/2312.12176)**
When using or referring to the UrbanSyn dataset in your research, please cite our white paper:
```BibTeX
@misc{gomez2023one,
title={All for One, and One for All: UrbanSyn Dataset, the third Musketeer of Synthetic Driving Scenes},
author={Jose L. Gómez and Manuel Silva and Antonio Seoane and Agnès Borrás and Mario Noriega and Germán Ros and Jose A. Iglesias-Guitian and Antonio M. López},
year={2023},
eprint={2312.12176},
archivePrefix={arXiv},
primaryClass={cs.CV}
}
```
## Terms of Use
The UrbanSyn Dataset is provided by the Computer Vision Center (UAB) and CITIC (University of A Coruña).
UrbanSyn may be used for research and commercial purposes, and it is subject to the Creative Commons Attribution-ShareAlike 4.0 license. A summary of the CC-BY-SA 4.0 licensing terms can be found **[[here]](https://creativecommons.org/licenses/by-sa/4.0/deed.en)**.
Due to constraints from our asset providers for UrbanSyn, we prohibit the use of generative AI technologies for reverse engineering any assets or creating content for stock media platforms based on the UrbanSyn dataset.
While we strive to generate precise data, all information is presented 'as is' without any express or implied warranties. We explicitly disclaim all representations and warranties regarding the validity, scope, accuracy, completeness, safety, or utility of the licensed content, including any implied warranties of merchantability, fitness for a particular purpose, or otherwise.
## Acknowledgements
Funded by Grant agreement PID2020-115734RB-C21 "SSL-ADA" and Grant agreement PID2020-115734RB-C22 "PGAS-ADA"
<img src="MICINN_Gob_AEI_1.jpg" width="300" />
## For more information about our team members and how to contact us, visit our website [https://urbansyn.org](https://urbansyn.org)
## Folder structure and content
- ```rgb```: contains RGB images with a resolution of 2048x1024 in PNG format.
- ```ss and ss_colour``` : contains the pixel-level semantic segmentation labels in grayscale (value = Class ID) and colour (value = Class RGB) respectively in PNG format. We follow the 19 training classes defined on Cityscapes:
| name | trainId | color |
|----------------------|---------|-----------------|
| 'road' | 0 | (128, 64,128) |
| 'sidewalk' | 1 | (244, 35,232) |
| 'building' | 2 | ( 70, 70, 70) |
| 'wall' | 3 | (102,102,156) |
| 'fence' | 4 | (190,153,153) |
| 'pole' | 5 | (153,153,153) |
| 'traffic light' | 6 | (250,170, 30) |
| 'traffic sign' | 7 | (220,220, 0) |
| 'vegetation' | 8 | (107,142, 35) |
| 'terrain' | 9 | (152,251,152) |
| 'sky' | 10 | ( 70,130,180) |
| 'person' | 11 | (220, 20, 60) |
| 'rider' | 12 | (255, 0, 0) |
| 'car' | 13 | ( 0, 0,142) |
| 'truck' | 14 | ( 0, 0, 70) |
| 'bus' | 15 | ( 0, 60,100) |
| 'train' | 16 | ( 0, 80,100) |
| 'motorcycle' | 17 | ( 0, 0,230) |
| 'bicycle' | 18 | (119, 11, 32) |
| 'unlabeled' | 19 | ( 0, 0, 0) |
- ```panoptic```: contains the instance segmentation of the dynamic objects of the image in PNG format. Each instance is codified using the RGB channels, where RG corresponds to the instance number and B to the class ID. Dynamic objects are Person, Rider, Car, Truck, Bus, Train, Motorcycle and Bicycle.
- ```bbox2D```: contains the 2D bounding boxes and instance information for all the dynamic objects in the image up to 110 meters of distance from the camera and larger than 150 pixels. We provide the annotations in a JSON file with the following structure:
  - bbox: provides the bounding box size determined by the top left corner (xMin, yMin) and bottom right corner (xMax, yMax).
- color: corresponds to the colour of the instance in the panoptic instance segmentation map inside panoptic folder.
- label: defines the class name
  - occlusion_percentage: provides the occlusion percentage of the object. A value of 0 means not occluded and 100 means fully occluded.
- ```depth```: contains the depth map of the image in EXR format.
## Download locally with huggingface_hub library
- [Install huggingface_hub library](https://huggingface.co/docs/huggingface_hub/installation)
- You can download the dataset on Python this way:
``` from huggingface_hub import snapshot_download ```
``` snapshot_download(repo_id="UrbanSyn/UrbanSyn", repo_type="dataset") ```
- More information about how to download and additional options can be found [here](https://huggingface.co/docs/huggingface_hub/guides/download) |
zolak/twitter_dataset_1712986565 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 4695489
num_examples: 13504
download_size: 2371994
dataset_size: 4695489
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
iambestfeed/vnexpress_cluster_subset | ---
dataset_info:
features:
- name: label
dtype: string
- name: text
dtype: string
- name: embeddings
sequence: float64
- name: label-mapping
dtype: int64
splits:
- name: train
num_bytes: 63510854
num_examples: 10000
download_size: 65591860
dataset_size: 63510854
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/details_mychen76__openmixtral-6x7b-v2 | ---
pretty_name: Evaluation run of mychen76/openmixtral-6x7b-v2
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [mychen76/openmixtral-6x7b-v2](https://huggingface.co/mychen76/openmixtral-6x7b-v2)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mychen76__openmixtral-6x7b-v2\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-22T03:38:21.830640](https://huggingface.co/datasets/open-llm-leaderboard/details_mychen76__openmixtral-6x7b-v2/blob/main/results_2024-03-22T03-38-21.830640.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6549214653780983,\n\
\ \"acc_stderr\": 0.03192694712071088,\n \"acc_norm\": 0.6553162947426028,\n\
\ \"acc_norm_stderr\": 0.03257645266665692,\n \"mc1\": 0.47368421052631576,\n\
\ \"mc1_stderr\": 0.017479241161975526,\n \"mc2\": 0.651312479462047,\n\
\ \"mc2_stderr\": 0.015362786449252212\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6604095563139932,\n \"acc_stderr\": 0.013839039762820167,\n\
\ \"acc_norm\": 0.6851535836177475,\n \"acc_norm_stderr\": 0.013572657703084948\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6938856801433977,\n\
\ \"acc_stderr\": 0.00459935892090954,\n \"acc_norm\": 0.8674566819358693,\n\
\ \"acc_norm_stderr\": 0.003383875172670026\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n\
\ \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n\
\ \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n\
\ \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n\
\ \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \
\ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544067,\n\
\ \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544067\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \
\ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n\
\ \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n\
\ \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n\
\ \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.049135952012744975,\n\
\ \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.049135952012744975\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n\
\ \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n\
\ \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944437,\n \"\
acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944437\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n\
\ \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n\
\ \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7903225806451613,\n \"acc_stderr\": 0.023157879349083525,\n \"\
acc_norm\": 0.7903225806451613,\n \"acc_norm_stderr\": 0.023157879349083525\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n \"\
acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\"\
: 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n\
\ \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"\
acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n\
\ \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.023854795680971114,\n\
\ \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.023854795680971114\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3111111111111111,\n \"acc_stderr\": 0.02822644674968351,\n \
\ \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.02822644674968351\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977927,\n\
\ \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977927\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\
acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8532110091743119,\n \"acc_stderr\": 0.015173141845126243,\n \"\
acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.015173141845126243\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\
: 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\
\ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n\
\ \"acc_stderr\": 0.024857478080250434,\n \"acc_norm\": 0.8529411764705882,\n\
\ \"acc_norm_stderr\": 0.024857478080250434\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.8143459915611815,\n \"acc_stderr\": 0.025310495376944867,\n\
\ \"acc_norm\": 0.8143459915611815,\n \"acc_norm_stderr\": 0.025310495376944867\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\
\ \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n\
\ \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n\
\ \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"\
acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n\
\ \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n\
\ \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n\
\ \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\
\ \"acc_stderr\": 0.047184714852195886,\n \"acc_norm\": 0.44642857142857145,\n\
\ \"acc_norm_stderr\": 0.047184714852195886\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n\
\ \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n\
\ \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n\
\ \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8390804597701149,\n\
\ \"acc_stderr\": 0.013140225515611729,\n \"acc_norm\": 0.8390804597701149,\n\
\ \"acc_norm_stderr\": 0.013140225515611729\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500104,\n\
\ \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500104\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4424581005586592,\n\
\ \"acc_stderr\": 0.016611393687268577,\n \"acc_norm\": 0.4424581005586592,\n\
\ \"acc_norm_stderr\": 0.016611393687268577\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.02555316999182652,\n\
\ \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.02555316999182652\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n\
\ \"acc_stderr\": 0.025670259242188926,\n \"acc_norm\": 0.7138263665594855,\n\
\ \"acc_norm_stderr\": 0.025670259242188926\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.0239935017090421,\n\
\ \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.0239935017090421\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \
\ \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4530638852672751,\n\
\ \"acc_stderr\": 0.01271384597235898,\n \"acc_norm\": 0.4530638852672751,\n\
\ \"acc_norm_stderr\": 0.01271384597235898\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6985294117647058,\n \"acc_stderr\": 0.027875982114273168,\n\
\ \"acc_norm\": 0.6985294117647058,\n \"acc_norm_stderr\": 0.027875982114273168\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687495,\n \
\ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687495\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675606,\n\
\ \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675606\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\
\ \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n\
\ \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \
\ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n\
\ \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n\
\ \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640044,\n\
\ \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640044\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.47368421052631576,\n\
\ \"mc1_stderr\": 0.017479241161975526,\n \"mc2\": 0.651312479462047,\n\
\ \"mc2_stderr\": 0.015362786449252212\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7987371744277821,\n \"acc_stderr\": 0.01126851997157768\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.686125852918878,\n \
\ \"acc_stderr\": 0.012782681251053205\n }\n}\n```"
repo_url: https://huggingface.co/mychen76/openmixtral-6x7b-v2
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|arc:challenge|25_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|gsm8k|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hellaswag|10_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-22T03-38-21.830640.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-22T03-38-21.830640.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- '**/details_harness|winogrande|5_2024-03-22T03-38-21.830640.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-22T03-38-21.830640.parquet'
- config_name: results
data_files:
- split: 2024_03_22T03_38_21.830640
path:
- results_2024-03-22T03-38-21.830640.parquet
- split: latest
path:
- results_2024-03-22T03-38-21.830640.parquet
---
# Dataset Card for Evaluation run of mychen76/openmixtral-6x7b-v2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [mychen76/openmixtral-6x7b-v2](https://huggingface.co/mychen76/openmixtral-6x7b-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_mychen76__openmixtral-6x7b-v2",
"harness_winogrande_5",
	split="latest")
```
## Latest results
These are the [latest results from run 2024-03-22T03:38:21.830640](https://huggingface.co/datasets/open-llm-leaderboard/details_mychen76__openmixtral-6x7b-v2/blob/main/results_2024-03-22T03-38-21.830640.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6549214653780983,
"acc_stderr": 0.03192694712071088,
"acc_norm": 0.6553162947426028,
"acc_norm_stderr": 0.03257645266665692,
"mc1": 0.47368421052631576,
"mc1_stderr": 0.017479241161975526,
"mc2": 0.651312479462047,
"mc2_stderr": 0.015362786449252212
},
"harness|arc:challenge|25": {
"acc": 0.6604095563139932,
"acc_stderr": 0.013839039762820167,
"acc_norm": 0.6851535836177475,
"acc_norm_stderr": 0.013572657703084948
},
"harness|hellaswag|10": {
"acc": 0.6938856801433977,
"acc_stderr": 0.00459935892090954,
"acc_norm": 0.8674566819358693,
"acc_norm_stderr": 0.003383875172670026
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7171052631578947,
"acc_stderr": 0.03665349695640767,
"acc_norm": 0.7171052631578947,
"acc_norm_stderr": 0.03665349695640767
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.65,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.65,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7132075471698113,
"acc_stderr": 0.027834912527544067,
"acc_norm": 0.7132075471698113,
"acc_norm_stderr": 0.027834912527544067
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956911,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956911
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6647398843930635,
"acc_stderr": 0.03599586301247077,
"acc_norm": 0.6647398843930635,
"acc_norm_stderr": 0.03599586301247077
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.049135952012744975,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.049135952012744975
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909282
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5829787234042553,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.5829787234042553,
"acc_norm_stderr": 0.03223276266711712
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5,
"acc_stderr": 0.047036043419179864,
"acc_norm": 0.5,
"acc_norm_stderr": 0.047036043419179864
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.04122737111370333,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.04122737111370333
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.025331202438944437,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.025331202438944437
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.48412698412698413,
"acc_stderr": 0.04469881854072606,
"acc_norm": 0.48412698412698413,
"acc_norm_stderr": 0.04469881854072606
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7903225806451613,
"acc_stderr": 0.023157879349083525,
"acc_norm": 0.7903225806451613,
"acc_norm_stderr": 0.023157879349083525
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.49261083743842365,
"acc_stderr": 0.03517603540361008,
"acc_norm": 0.49261083743842365,
"acc_norm_stderr": 0.03517603540361008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03225078108306289,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03225078108306289
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.029126522834586815,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.029126522834586815
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.02150024957603348,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.02150024957603348
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6692307692307692,
"acc_stderr": 0.023854795680971114,
"acc_norm": 0.6692307692307692,
"acc_norm_stderr": 0.023854795680971114
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3111111111111111,
"acc_stderr": 0.02822644674968351,
"acc_norm": 0.3111111111111111,
"acc_norm_stderr": 0.02822644674968351
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6890756302521008,
"acc_stderr": 0.030066761582977927,
"acc_norm": 0.6890756302521008,
"acc_norm_stderr": 0.030066761582977927
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.03861557546255169,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.03861557546255169
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8532110091743119,
"acc_stderr": 0.015173141845126243,
"acc_norm": 0.8532110091743119,
"acc_norm_stderr": 0.015173141845126243
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5277777777777778,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.5277777777777778,
"acc_norm_stderr": 0.0340470532865388
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.024857478080250434,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.024857478080250434
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8143459915611815,
"acc_stderr": 0.025310495376944867,
"acc_norm": 0.8143459915611815,
"acc_norm_stderr": 0.025310495376944867
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7709923664122137,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.7709923664122137,
"acc_norm_stderr": 0.036853466317118506
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8099173553719008,
"acc_stderr": 0.03581796951709282,
"acc_norm": 0.8099173553719008,
"acc_norm_stderr": 0.03581796951709282
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7484662576687117,
"acc_stderr": 0.03408997886857529,
"acc_norm": 0.7484662576687117,
"acc_norm_stderr": 0.03408997886857529
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.047184714852195886,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.047184714852195886
},
"harness|hendrycksTest-management|5": {
"acc": 0.8155339805825242,
"acc_stderr": 0.03840423627288276,
"acc_norm": 0.8155339805825242,
"acc_norm_stderr": 0.03840423627288276
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.021262719400406957,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.021262719400406957
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8390804597701149,
"acc_stderr": 0.013140225515611729,
"acc_norm": 0.8390804597701149,
"acc_norm_stderr": 0.013140225515611729
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7312138728323699,
"acc_stderr": 0.023868003262500104,
"acc_norm": 0.7312138728323699,
"acc_norm_stderr": 0.023868003262500104
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4424581005586592,
"acc_stderr": 0.016611393687268577,
"acc_norm": 0.4424581005586592,
"acc_norm_stderr": 0.016611393687268577
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7254901960784313,
"acc_stderr": 0.02555316999182652,
"acc_norm": 0.7254901960784313,
"acc_norm_stderr": 0.02555316999182652
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7138263665594855,
"acc_stderr": 0.025670259242188926,
"acc_norm": 0.7138263665594855,
"acc_norm_stderr": 0.025670259242188926
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7530864197530864,
"acc_stderr": 0.0239935017090421,
"acc_norm": 0.7530864197530864,
"acc_norm_stderr": 0.0239935017090421
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4858156028368794,
"acc_stderr": 0.02981549448368206,
"acc_norm": 0.4858156028368794,
"acc_norm_stderr": 0.02981549448368206
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4530638852672751,
"acc_stderr": 0.01271384597235898,
"acc_norm": 0.4530638852672751,
"acc_norm_stderr": 0.01271384597235898
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6985294117647058,
"acc_stderr": 0.027875982114273168,
"acc_norm": 0.6985294117647058,
"acc_norm_stderr": 0.027875982114273168
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.019070985589687495,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.019070985589687495
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.028920583220675606,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.028920583220675606
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.025538433368578337,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.025538433368578337
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.85,
"acc_norm_stderr": 0.0358870281282637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.536144578313253,
"acc_stderr": 0.03882310850890594,
"acc_norm": 0.536144578313253,
"acc_norm_stderr": 0.03882310850890594
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8187134502923976,
"acc_stderr": 0.029547741687640044,
"acc_norm": 0.8187134502923976,
"acc_norm_stderr": 0.029547741687640044
},
"harness|truthfulqa:mc|0": {
"mc1": 0.47368421052631576,
"mc1_stderr": 0.017479241161975526,
"mc2": 0.651312479462047,
"mc2_stderr": 0.015362786449252212
},
"harness|winogrande|5": {
"acc": 0.7987371744277821,
"acc_stderr": 0.01126851997157768
},
"harness|gsm8k|5": {
"acc": 0.686125852918878,
"acc_stderr": 0.012782681251053205
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
larrylawl/opus | ---
annotations_creators:
- expert-generated
- found
language_creators:
- found
- expert-generated
license: []
multilinguality:
- translation
pretty_name: opus
size_categories: []
source_datasets: []
tags:
- parallel-corpus
task_categories:
- translation
task_ids: []
---
# Dataset Card for [opus]
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
**Disclaimer.** Loading this dataset is slow, so it may not be feasible at scale. I'd suggest using the other OPUS datasets on Huggingface, which each load a specific corpus.
Loads [OPUS](https://opus.nlpl.eu/) as HuggingFace dataset. OPUS is an open parallel corpus covering 700+ languages and 1100+ datasets.
Given a `src` and `tgt` language, this repository can load *all* available parallel corpora. To my knowledge, the other OPUS datasets on Huggingface each load a specific corpus.
**Requirements**.
```
pip install pandas
# pip install my fork of `opustools`
git clone https://github.com/larrylawl/OpusTools.git
pip install -e OpusTools/opustools_pkg
```
**Example Usage**.
```
# args follows `opustools`: https://pypi.org/project/opustools/
src="en"
tgt="id"
download_dir="data" # dir to save downloaded files
corpus="bible-uedin" # corpus name. Leave as `None` to download all available corpora for the src-tgt pair.
dataset = load_dataset("larrylawl/opus",
src=src,
tgt=tgt,
download_dir=download_dir,
corpus=corpus)
```
**Disclaimer**.
This repository is still in active development. Do make a PR if there are any issues!
### Dataset Summary
[More Information Needed]
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
Available languages can be viewed on the [OPUS API](https://opus.nlpl.eu/opusapi/?languages=True)
## Dataset Structure
### Data Instances
```
{'src': 'In the beginning God created the heavens and the earth .',
'tgt': 'Pada mulanya , waktu Allah mulai menciptakan alam semesta'}
```
### Data Fields
```
features = {
"src": datasets.Value("string"),
"tgt": datasets.Value("string"),
}
```
### Data Splits
Merged all data into train split.
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
Thanks to [@larrylawl](https://larrylawl.github.io/) for adding this dataset. |
autoevaluate/autoeval-eval-glue-mrpc-9038ab-1509054846 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- glue
eval_info:
task: natural_language_inference
model: JeremiahZ/bert-base-uncased-mrpc
metrics: []
dataset_name: glue
dataset_config: mrpc
dataset_split: validation
col_mapping:
text1: sentence1
text2: sentence2
target: label
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Natural Language Inference
* Model: JeremiahZ/bert-base-uncased-mrpc
* Dataset: glue
* Config: mrpc
* Split: validation
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@JeremiahZ](https://huggingface.co/JeremiahZ) for evaluating this model. |
TinyPixel/lima_1 | ---
dataset_info:
features:
- name: human
dtype: string
- name: gpt
dtype: string
splits:
- name: train
num_bytes: 2887450
num_examples: 1030
download_size: 1701721
dataset_size: 2887450
---
# Dataset Card for "lima_1"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
result-kand2-sdxl-wuerst-karlo/b6112e1b | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 166
num_examples: 10
download_size: 1318
dataset_size: 166
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "b6112e1b"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
dmayhem93/agieval-lsat-ar | ---
dataset_info:
features:
- name: query
dtype: string
- name: choices
sequence: string
- name: gold
sequence: int64
splits:
- name: test
num_bytes: 273902
num_examples: 230
download_size: 66495
dataset_size: 273902
---
# Dataset Card for "agieval-lsat-ar"
Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo.
Raw dataset: https://github.com/zhongwanjun/AR-LSAT
MIT License
Copyright (c) 2022 Wanjun Zhong
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@misc{zhong2023agieval,
title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models},
author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan},
year={2023},
eprint={2304.06364},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
@misc{zhong2021arlsat,
title={AR-LSAT: Investigating Analytical Reasoning of Text},
author={Wanjun Zhong and Siyuan Wang and Duyu Tang and Zenan Xu and Daya Guo and Jiahai Wang and Jian Yin and Ming Zhou and Nan Duan},
year={2021},
eprint={2104.06598},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
@article{wang2022lsat,
title={From lsat: The progress and challenges of complex reasoning},
author={Wang, Siyuan and Liu, Zhongkun and Zhong, Wanjun and Zhou, Ming and Wei, Zhongyu and Chen, Zhumin and Duan, Nan},
journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing},
year={2022},
publisher={IEEE}
} |
dkshjn/chatdoctor-200k-stripped-embedded | ---
dataset_info:
features:
- name: input
dtype: string
- name: instruction
dtype: string
- name: output
dtype: string
splits:
- name: s1
num_bytes: 82572870.22843863
num_examples: 82964
- name: s2
num_bytes: 82571874.94289035
num_examples: 82963
- name: s3
num_bytes: 82571874.94289035
num_examples: 82963
- name: s4
num_bytes: 82571874.94289035
num_examples: 82963
- name: s5
num_bytes: 82571874.94289035
num_examples: 82963
download_size: 231809582
dataset_size: 412860370.0
configs:
- config_name: default
data_files:
- split: s1
path: data/s1-*
- split: s2
path: data/s2-*
- split: s3
path: data/s3-*
- split: s4
path: data/s4-*
- split: s5
path: data/s5-*
---
|
Seanxh/twitter_dataset_1713204712 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 134471
num_examples: 315
download_size: 51234
dataset_size: 134471
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
Nexdata/Russian_Spontaneous_Speech_Data | ---
task_categories:
- automatic-speech-recognition
language:
- ru
---
# Dataset Card for Nexdata/Russian_Spontaneous_Speech_Data
## Description
503 Hours - Russian Spontaneous Speech Data, with content covering multiple topics. All the speech audio was manually transcribed into text content; speaker identity, gender, and other attributes are also annotated. This dataset can be used for voiceprint recognition model training, corpus construction for machine translation, and algorithm research.
For more details, please refer to the link: https://www.nexdata.ai/datasets/1271?source=Huggingface
# Specifications
## Format
16kHz, 16bit, mono channel;
## Content category
including education, interview, sports, etc
## Language
Russian;
## Annotation
annotation for the transcription text, speaker identification, gender;
## Application scenarios
speech recognition, video caption generation and video content review;
## Accuracy
a Word Accuracy Rate (WAR) of no less than 98%.
# Licensing Information
Commercial License |
sruly/StepBackSearch | ---
license: apache-2.0
---
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.