id
stringlengths
2
115
author
stringlengths
2
42
last_modified
timestamp[us, tz=UTC]
downloads
int64
0
8.87M
likes
int64
0
3.84k
paperswithcode_id
stringlengths
2
45
tags
list
lastModified
timestamp[us, tz=UTC]
createdAt
stringlengths
24
24
key
stringclasses
1 value
created
timestamp[us]
card
stringlengths
1
1.01M
embedding
list
library_name
stringclasses
21 values
pipeline_tag
stringclasses
27 values
mask_token
null
card_data
null
widget_data
null
model_index
null
config
null
transformers_info
null
spaces
null
safetensors
null
transformersInfo
null
modelId
stringlengths
5
111
embeddings
list
BioDEX/BioDEX-ICSR
BioDEX
2023-05-30T15:20:25Z
139
2
null
[ "region:us" ]
2023-05-30T15:20:25Z
2023-04-19T11:10:45.000Z
2023-04-19T11:10:45
--- dataset_info: features: - name: title dtype: string - name: abstract dtype: string - name: fulltext dtype: string - name: target dtype: string - name: pmid dtype: string - name: fulltext_license dtype: string - name: title_normalized dtype: string - name: issue dtype: string - name: pages dtype: string - name: journal dtype: string - name: authors dtype: string - name: pubdate dtype: string - name: doi dtype: string - name: affiliations dtype: string - name: medline_ta dtype: string - name: nlm_unique_id dtype: string - name: issn_linking dtype: string - name: country dtype: string - name: mesh_terms dtype: string - name: publication_types dtype: string - name: chemical_list dtype: string - name: keywords dtype: string - name: references dtype: string - name: delete dtype: bool - name: pmc dtype: string - name: other_id dtype: string - name: safetyreportid dtype: int64 - name: fulltext_processed dtype: string splits: - name: test num_bytes: 155748936 num_examples: 3628 - name: train num_bytes: 374859364 num_examples: 9624 - name: validation num_bytes: 96385392 num_examples: 2407 download_size: 337571954 dataset_size: 626993692 --- # Dataset Card for "BioDEX-ICSR" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6600859761238098, 0.11733780056238174, 0.3238835334777832, 0.11427522450685501, -0.43622347712516785, 0.30796316266059875, 0.5516594648361206, -0.3327437937259674, 0.9295400381088257, 0.31145179271698, -0.8390932679176331, -0.835780918598175, -0.5937572121620178, 0.02951165847480297, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
burkelibbey/colors
burkelibbey
2023-07-14T18:58:16Z
139
6
null
[ "license:mit", "region:us" ]
2023-07-14T18:58:16Z
2023-07-14T17:35:54.000Z
2023-07-14T17:35:54
--- license: mit ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
cdminix/bu_radio
cdminix
2023-10-24T08:07:47Z
139
0
null
[ "task_categories:automatic-speech-recognition", "task_categories:text-to-speech", "license:other", "region:us" ]
2023-10-24T08:07:47Z
2023-07-17T15:05:46.000Z
2023-07-17T15:05:46
--- license: other task_categories: - automatic-speech-recognition - text-to-speech --- Simply point ``BURN_PATH`` to your local copy of the dataset.
[ 0.08851443231105804, -0.1832600235939026, 0.2918967604637146, -0.016444895416498184, -0.3682772219181061, 0.026113957166671753, 0.07321493327617645, 0.5536946058273315, 0.7418252825737, 1.2187919616699219, -0.7600139379501343, -0.495410680770874, -0.03902718424797058, -0.18896326422691345,...
null
null
null
null
null
null
null
null
null
null
null
null
null
flpelerin/ChatAlpaca-10k
flpelerin
2023-08-10T20:12:26Z
139
0
null
[ "license:cc-by-4.0", "region:us" ]
2023-08-10T20:12:26Z
2023-08-10T18:14:51.000Z
2023-08-10T18:14:51
--- license: cc-by-4.0 ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
hippocrates/CitationGPTv2_train
hippocrates
2023-11-10T17:06:33Z
139
0
null
[ "region:us" ]
2023-11-10T17:06:33Z
2023-11-07T19:12:52.000Z
2023-11-07T19:12:52
--- dataset_info: features: - name: id dtype: string - name: conversations list: - name: from dtype: string - name: value dtype: string - name: text dtype: string splits: - name: train num_bytes: 365416080 num_examples: 99360 - name: valid num_bytes: 47324714 num_examples: 12760 - name: test num_bytes: 42152251 num_examples: 11615 download_size: 17034010 dataset_size: 454893045 --- # Dataset Card for "CitationGPTv2_train" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5085615515708923, 0.041504956781864166, 0.16061604022979736, 0.4849715232849121, -0.18019253015518188, -0.1756226122379303, 0.19922864437103271, 0.013894396834075451, 0.40075767040252686, 0.16218070685863495, -0.6023366451263428, -0.36217445135116577, -0.7558753490447998, -0.33523374795...
null
null
null
null
null
null
null
null
null
null
null
null
null
AntoineBlanot/snli-3way
AntoineBlanot
2023-11-17T02:57:32Z
139
0
null
[ "region:us" ]
2023-11-17T02:57:32Z
2023-11-17T02:50:18.000Z
2023-11-17T02:50:18
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: premise dtype: string - name: hypothesis dtype: string - name: label_name dtype: string splits: - name: train num_bytes: 69083095 num_examples: 549367 - name: test num_bytes: 1300733 num_examples: 9842 download_size: 19994363 dataset_size: 70383828 --- # Dataset Card for "snli-3way" This dataset is the [snli](https://huggingface.co/datasets/snli) dataset where the labels are: `entailment`, `contradiction` and `neutral`. [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.31044360995292664, -0.4692874550819397, 0.36858612298965454, 0.30161523818969727, -0.22364412248134613, -0.10151828080415726, 0.337435245513916, -0.38749831914901733, 0.8425067663192749, 0.7000089287757874, -0.8753229379653931, -0.5255781412124634, -0.5217709541320801, 0.202860191464424...
null
null
null
null
null
null
null
null
null
null
null
null
null
qwant/squad_fr
qwant
2023-04-19T14:37:09Z
138
6
squad
[ "task_categories:question-answering", "task_ids:extractive-qa", "task_ids:closed-domain-qa", "annotations_creators:machine-generated", "language_creators:machine-generated", "multilinguality:monolingual", "multilinguality:translation", "size_categories:10K<n<100K", "source_datasets:extended|squad", ...
2023-04-19T14:37:09Z
2022-03-02T23:29:22.000Z
2022-03-02T23:29:22
--- annotations_creators: - machine-generated language_creators: - machine-generated language: - fr license: - cc-by-4.0 multilinguality: - monolingual - translation paperswithcode_id: squad pretty_name: SQuAD-fr size_categories: - 10K<n<100K source_datasets: - extended|squad task_categories: - question-answering task_ids: - extractive-qa - closed-domain-qa --- # Dataset Card for "squad_fr" ## Table of Contents - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) ## Dataset Description - **Paper:** [On the Usability of Transformers-based models for a French Question-Answering task](https://hal.archives-ouvertes.fr/hal-03336060) - **Size of downloaded dataset files:** 10 MB - **Size of the generated dataset:** 73 MB - **Total amount of disk used:** 83 MB ### Dataset Summary SQuAD-fr: - a translated version of the Stanford Question Answering Dataset (SQuAD) into French - obtained through automatic translation of the English dataset - a reading comprehension dataset, consisting of approximately 90K factoid questions on Wikipedia articles, where the answer to every question is 
a segment of text, or span, from the corresponding reading passage - serves as a means of data augmentation on FQuAD and PIAF benchmarks ### Supported Tasks and Leaderboards - `closed-domain-qa`, `text-retrieval`: This dataset is intended to be used for `closed-domain-qa`, but can also be used for information retrieval tasks. ### Languages This dataset is exclusively in French. ## Dataset Structure ### Data Instances #### default - **Size of downloaded dataset files:** 10 MB - **Size of the generated dataset:** 73 MB - **Total amount of disk used:** 83 MB An example of 'train' looks as follows. ``` { "answers": { "answer_start": [1], "text": ["This is a test text"] }, "context": "This is a test context.", "id": "1", "question": "Is this a test?", "title": "train test" } ``` ### Data Fields The data fields are the same among all splits. #### plain_text - `id`: a `string` feature. - `title`: a `string` feature. - `context`: a `string` feature. - `question`: a `string` feature. - `answers`: a dictionary feature containing: - `text`: a `string` feature. - `answer_start`: a `int32` feature. ### Data Splits | name |train|validation| |----------|----:|---------:| |1.1.0|87514| 17492| ## Dataset Creation ### Curation Rationale Usability of Transformer-based models, instability relating to data scarcity, investigation of data augmentation, hyperparameters optimization and cross-lingual transfer on the performance of a question-answering task in French. ### Source Data #### Initial Data Collection and Normalization validation: manually collected gold standards, chrf scores and bleu evaluation #### Who are the source language producers? [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Annotations #### Annotation process [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) #### Who are the annotators? 
[More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Personal and Sensitive Information [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Discussion of Biases [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Other Known Limitations [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ## Additional Information ### Dataset Curators [More Information Needed](https://github.com/huggingface/datasets/blob/master/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) ### Licensing Information Attribution-NonCommercial-ShareAlike 3.0 Unported (CC BY-NC-SA 3.0) ### Citation Information ``` @inproceedings{cattan:hal-03336060, TITLE = {{On the Usability of Transformers-based models for a French Question-Answering task}}, AUTHOR = {Cattan, Oralie and Servan, Christophe and Rosset, Sophie}, URL = {https://hal.archives-ouvertes.fr/hal-03336060}, BOOKTITLE = {{Recent Advances in Natural Language Processing (RANLP)}}, ADDRESS = {Varna, Bulgaria}, YEAR = {2021}, MONTH = Sep, PDF = {https://hal.archives-ouvertes.fr/hal-03336060/file/RANLP_2021_transformers_usability.pdf}, HAL_ID = {hal-03336060}, HAL_VERSION = {v1}, } ```
[ -0.6871771216392517, -0.7320301532745361, 0.09354976564645767, 0.22498680651187897, -0.03175326809287071, 0.02429107204079628, -0.306185781955719, -0.34220996499061584, 0.27128148078918457, 0.5280674695968628, -0.9441037178039551, -0.7719646692276001, -0.302595317363739, 0.2789507210254669...
null
null
null
null
null
null
null
null
null
null
null
null
null
taskydata/tasky_or_not
taskydata
2023-05-31T03:30:14Z
138
0
null
[ "task_categories:text-classification", "size_categories:10M<n<100M", "language:en", "license:mit", "region:us" ]
2023-05-31T03:30:14Z
2022-06-14T15:42:22.000Z
2022-06-14T15:42:22
--- license: mit task_categories: - text-classification language: - en size_categories: - 10M<n<100M ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-50000-100000
tomekkorbak
2022-10-06T02:59:17Z
138
0
null
[ "region:us" ]
2022-10-06T02:59:17Z
2022-10-03T18:47:12.000Z
2022-10-03T18:47:12
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
language-and-voice-lab/samromur_asr
language-and-voice-lab
2023-02-24T22:02:05Z
138
0
null
[ "task_categories:automatic-speech-recognition", "annotations_creators:crowdsourced", "language_creators:crowdsourced", "multilinguality:monolingual", "size_categories:100K<n<1M", "source_datasets:original", "language:is", "license:cc-by-4.0", "crowd-sourced icelandic", "samrómur", "icelandic spe...
2023-02-24T22:02:05Z
2022-11-27T08:33:04.000Z
2022-11-27T08:33:04
--- annotations_creators: - crowdsourced language: - is language_creators: - crowdsourced license: - cc-by-4.0 multilinguality: - monolingual pretty_name: "Samrómur Icelandic Speech 1.0." size_categories: - 100K<n<1M source_datasets: - original tags: - crowd-sourced icelandic - "samrómur" - icelandic speech - samromur - iceland task_categories: - automatic-speech-recognition task_ids: [] --- # Dataset Card for samromur_asr ## Table of Contents - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** [Samrómur 21.05] - **Repository:** [OpenSLR](http://www.openslr.org/112/) - **Paper:** [Samrómur: Crowd-sourcing Data Collection for Icelandic Speech Recognition](https://aclanthology.org/2020.lrec-1.425.pdf) - **Point of Contact:** [Jón Guðnason](mailto:jg@ru.is) ### Dataset Summary This is the first release of the Samrómur Icelandic Speech corpus that contains 100.000 validated utterances. 
The corpus is a result of the crowd-sourcing effort run by the Language and Voice Lab at the Reykjavik University, in cooperation with Almannarómur, Center for Language Technology. ### Example Usage The Samrómur Corpus is divided in 3 splits: train, validation and test. To load a specific split pass its name as a config name: ```python from datasets import load_dataset samromur_asr = load_dataset("language-and-voice-lab/samromur_asr") ``` To load an specific split (for example, the validation split) do: ```python from datasets import load_dataset samromur_asr = load_dataset("language-and-voice-lab/samromur_asr",split="validation") ``` ### Supported Tasks automatic-speech-recognition: The dataset can be used to train a model for Automatic Speech Recognition (ASR). The model is presented with an audio file and asked to transcribe the audio file to written text. The most common evaluation metric is the word error rate (WER). ### Languages The audio is in Icelandic. The reading prompts were gathered from a variety of sources, mainly from the [Icelandic Gigaword Corpus](http://clarin.is/en/resources/gigaword). The corpus includes text from novels, news, plays, and from a list of location names in Iceland. The prompts also came from the [Icelandic Web of Science](https://www.visindavefur.is/). ## Dataset Structure ### Data Instances ```python { 'audio_id': '009123-0150695', 'audio': { 'path': '/home/david/.cache/HuggingFace/datasets/downloads/extracted/cb428a7f1e46b058d76641ef32f36b49d28b73aea38509983170495408035a10/dev/009123/009123-0150695.flac', 'array': array([0., 0., 0., ..., 0., 0., 0.], dtype=float32), 'sampling_rate': 16000 }, 'speaker_id': '009123', 'gender': 'female', 'age': '18-19', 'duration': 3.299999952316284, 'normalized_text': 'það skipti heldur engu' } ``` ### Data Fields * `audio_id` (string) - id of audio segment * `audio` (datasets.Audio) - a dictionary containing the path to the audio, the decoded audio array, and the sampling rate. 
In non-streaming mode (default), the path points to the locally extracted audio. In streaming mode, the path is the relative path of an audio inside its archive (as files are not downloaded and extracted locally). * `speaker_id` (string) - id of speaker * `gender` (string) - gender of speaker (male or female) * `age` (string) - range of age of the speaker. * `duration` (float32) - duration of the audio file in seconds. * `normalized_text` (string) - normalized audio segment transcription. ### Data Splits The corpus is split into train, validation, and test subsets with no speaker overlap. Each subset contains folders that correspond to speaker IDs, and the audio files inside use the following naming convention: {speaker_ID}-{utterance_ID}.flac. Lenghts of each portion are: train=114h/34m, test=15h51m, validation=15h16m. To load an specific portion please see the above section "Example Usage". ## Dataset Creation ### Curation Rationale * The recording has started in October 2019 and continues to this day (May 2021). * This release has been authorized for release in May 2021. * The aim is to create an open-source speech corpus to enable research and development for Icelandic Language Technology. * The corpus contains audio recordings and a metadata file that contains the prompts the participants read. * A Kaldi based script using this data can be found on the Language and Voice Lab gitHub page https://github.com/cadia-lvl/samromur-asr ### Source Data #### Initial Data Collection and Normalization * The utterances were recorded by a smartphone or the web app. * The data was collected using the website https://samromur.is, code of which is available at https://github.com/cadia-lvl/samromur. * Each recording contains one read sentence from a script. * The script contains 85.080 unique sentences and 90.838 unique tokens. 
### Annotations #### Annotation process Prompts were pulled from these corpora if they met the criteria of having only letters which are present in the Icelandic alphabet, and if they are listed in the [DIM: Database Icelandic Morphology](https://aclanthology.org/W19-6116.pdf). There are also synthesised prompts consisting of a name followed by a question or a demand, in order to simulate a dialogue with a smart-device. #### Who are the annotators? The audio files content was manually verified against the prompts by one or more listener (summer students mainly). ### Personal and Sensitive Information The dataset consists of people who have donated their voice. You agree to not attempt to determine the identity of speakers in this dataset. ## Considerations for Using the Data ### Social Impact of Dataset This contribution describes an ongoing project of speech data collection, using the web application Samrómur which is built upon Common Voice, Mozilla Foundation's web platform for open-source voice collection. The goal of the project is to build a large-scale speech corpus for Automatic Speech Recognition (ASR) for Icelandic. Upon completion, Samrómur will be the largest open speech corpus for Icelandic collected from the public domain. ### Discussion of Biases * The participants are aged between 18 to 90, 59,782 recordings are from female speakers and 40,218 are from male, recorded by a smartphone or the web app. * Participants self-reported their age group, gender, and the native language. * The corpus contains 100 000 utterance from 8392 speaker, totalling 145 hours. ### Other Known Limitations "Samromur 21.05" by the Language and Voice Laboratory (LVL) at the Reykjavik University is licensed under a Creative Commons Attribution 4.0 International (CC BY 4.0) License with the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
## Additional Information ### Dataset Curators The corpus is a result of the crowd-sourcing effort run by the Language and Voice Lab at the Reykjavik University, in cooperation with Almannarómur, Center for Language Technology. ### Licensing Information [CC-BY-4.0](https://creativecommons.org/licenses/by/4.0/) ### Citation Information ``` @inproceedings{mollberg-etal-2020-samromur, title = "{S}amr{\'o}mur: Crowd-sourcing Data Collection for {I}celandic Speech Recognition", author = "Mollberg, David Erik and J{\'o}nsson, {\'O}lafur Helgi and {\TH}orsteinsd{\'o}ttir, Sunneva and Steingr{\'\i}msson, Stein{\th}{\'o}r and Magn{\'u}sd{\'o}ttir, Eyd{\'\i}s Huld and Gudnason, Jon", booktitle = "Proceedings of the 12th Language Resources and Evaluation Conference", month = may, year = "2020", address = "Marseille, France", publisher = "European Language Resources Association", url = "https://aclanthology.org/2020.lrec-1.425", pages = "3463--3467", language = "English", ISBN = "979-10-95546-34-4", } ``` ### Contributions This project was funded by the Language Technology Programme for Icelandic 2019-2023. The programme, which is managed and coordinated by Almannarómur, is funded by the Icelandic Ministry of Education, Science and Culture. The verification for the dataset was funded by the the Icelandic Directorate of Labour's Student Summer Job Program. Special thanks for the summer students for all the hard work.
[ -0.3894495368003845, -0.648883044719696, 0.016487756744027138, 0.10423047095537186, -0.33529090881347656, 0.012838313356041908, -0.35469287633895874, -0.38298681378364563, 0.33777886629104614, 0.33354997634887695, -0.5448759198188782, -0.6836709976196289, -0.5658406019210815, 0.22748324275...
null
null
null
null
null
null
null
null
null
null
null
null
null
laion/OIG
laion
2023-03-31T00:06:28Z
138
258
null
[ "license:apache-2.0", "region:us" ]
2023-03-31T00:06:28Z
2023-03-05T00:34:58.000Z
2023-03-05T00:34:58
--- license: apache-2.0 --- # This is the Open Instruction Generalist Dataset This is our attempt to create a large instruction dataset of medium quality along with a smaller high quality instruciton dataset (OIG-small-chip2). The data is in the form of jsonl objects, with at least a 'text' field. Some datasets may also include a 'metadata' field. The 'text' field contains a string of the form of one or more of: - \<human\>: instruction\n\<bot\>: response - \<human\>: instruction\n\<bot\>: response .. \<human\>: instruction\n\<bot\>: response The purpose of the larger dataset is to perform continued pre-training, followed by a finetune on the smaller high quality dataset. The purpose of the smaller OIG-small-chip2 dataset is to make it easy to convert a language model pretrained on large amounts of text into an instruction following model using a small amount of additional compute via finetuning or softprompt tuning. Many additional datasets are being prepared by various community members and will be incorporated into this dataset as we are able to verify the quality and formatting of the data. Our goal is to make helpful and non-toxic instruction tuned models available to everyone. OIG is currently at 44M. We will continue to publish ever larger diverse instruction datasets with the goal of creating 1 trillion tokens of diverse instructions - enough to pretrain an LLM from scratch. It is best to download the individual jsonl files directly that you wish to use instead of using HF load_datasets. 
https://huggingface.co/datasets/laion/OIG/tree/main ## unified_abstract_infill.jsonl (~232000) dbpedia and wikipedia snippets combined with a small portion of https://github.com/google-research/dialog-inpainting ## unified_basic.jsonl (30) ## unified_conv_finqa.jsonl (~9000) https://github.com/czyssrs/ConvFinQA ## unified_cuad.jsonl (~500) https://www.atticusprojectai.org/cuad ## unified_essays.jsonl (~2000) - essays available on the public web ## unified_grade_school_math_instructions.jsonl (~9000) - https://github.com/openai/grade-school-math ## unified_hc3_human.jsonl (~58000) ## unified_image_prompts_instructions.jsonl (~15000) - A very small subset of LAION-400M ## unified_joke_explanations.jsonl (356) - Crawled from public internet. ## unified_mathqa_flanv2_kojma_cot.jsonl (~107000) - https://huggingface.co/datasets/math_qa, ## unified_merged_code_xp3.jsonl (~67000) - https://huggingface.co/datasets/bigscience/xP3 ## unified_multi_news.jsonl (~90000) - https://www.tensorflow.org/datasets/catalog/multi_news ## unified_multi_sum.jsonl (~1700000) ## unified_nq.jsonl (~307000) ## unified_openai_summarize_tldr.jsonl (~233000) - https://github.com/openai/summarize-from-feedback ## unified_oscar_en_sample_dialog.jsonl (~2670000) - https://oscar-project.org/ - https://huggingface.co/datasets/TurkuNLP/register_oscar ## unified_plot_screenplay_books_dialog.jsonl (~8000) - https://github.com/markriedl/WikiPlots extracted from Wikipedia, snippets from the Pile’s https://huggingface.co/datasets/the_pile_books3, and snippets of screenplays available on the public web. ## unified_sqlv1.jsonl (~17000) - public text 2 sql datasets. ## unified_sqlv2.jsonl(~24000) - public text 2 sql datasets. 
## unified_squad_v2.jsonl (~19000) - https://rajpurkar.github.io/SQuAD-explorer/ ## unified_squad_v2_more_neg.jsonl (~19000) - https://rajpurkar.github.io/SQuAD-explorer/ ## unified_ul2_plus_oscar_en_sample_dialog.jsonl (~2900000) - https://oscar-project.org/ - https://huggingface.co/datasets/TurkuNLP/register_oscar ## unified_unifiedskg_instructions.jsonl (~223000) - https://github.com/HKUNLP/UnifiedSKG ## unified_unnatural_instructions.jsonl (~238000) - https://github.com/orhonovich/unnatural-instructions ## unified_xp3_sample.jsonl (~188000) - https://huggingface.co/datasets/bigscience/xP3 ## unified_canadian_parliament.jsonl(~301000) - https://openparliament.ca/data-download/ ## unified_poetry_2_song.jsonl (~12000) - https://huggingface.co/datasets/merve/poetry - https://huggingface.co/datasets/matthh/gutenberg-poetry-corpus ## unified_flan.jsonl (~2700000) - https://github.com/google-research/FLAN/tree/main/flan/v2 ## unified_ni.jsonl (~256000) - https://github.com/allenai/natural-instructions ## unified_p3.jsonl (~31000000) - https://huggingface.co/datasets/bigscience/P3 ## unified_soda_dialog.jsonl (~1200000) - https://huggingface.co/datasets/allenai/soda ## unified_rallio_soda_upgraded_2048.jsonl (~210000) - https://huggingface.co/datasets/allenai/soda - a newer version of the unified_soda_dialog dataset, with multiple dialogs on one line - recommend to use either the unified_soda_dailog.jsonl or unified_rallio_soda_upgraded_2048, and not both. ## unified_rallio_safety_and_prosocial.jsonl (~319000) - Generated from public datasets and generated from Wiki similar to the chip2 data - Find a full list in the end of the document - This dataset also includes https://huggingface.co/datasets/allenai/prosocial-dialog and https://huggingface.co/datasets/Anthropic/hh-rlhf ## unified-chip2.jsonl / OIG-small-chip2 (~210000): This dataset was created as part of the LAION OA effort by @rallio67 and other members of the LAION contributors. 
It is a high quality dataset intended to be mixed into a large pre-train dataset and can be used for a final finetune. Chip2 contains: ### Python Code Examples (~6,000): A set of instruction / response pairs where the User requests the agent to generate a python function. These examples were generated using a large language model and few shot prompting with python code verified to execute. There are also ~3000 examples of manually curated one line python code examples from the Conala publication (see: https://conala-corpus.github.io/) ### Natural Instruction Examples (~124,000): A balanced set of diverse natural and factual questions and answers made using few shot prompted UL2 20B and an instruction tuned GPT-NeoX-20B model (Chip) and then rejection sampled using multiple automatic evaluations to remove low quality outputs and to filter out factually inaccurate answers. Also includes some filtered natural instructions from Anthropic Helpful instructions (see: https://github.com/anthropics/hh-rlhf). ### Generic Harmless Instruction Examples (~6,500): A set of instruction / response pairs sourced from the Anthropic redteam paper github (see: https://github.com/anthropics/hh-rlhf). This dataset includes a lot of data regarding real humans trying to make the Anthropic language models say harmful/toxic/trolling things. For this dataset only examples that were rated lowly on the harmful scale (0,1,2 out of 4, where 4 is the most toxic) were included. Again, only the first lines of dialogue (instruction, first_agent_response) were retained. ### Instruction/Responses with Lists (~14,000): A set of filtered and reformatted instruction / response pairs where the agent response contains a list. Sourced from the Anthropic github (see: https://github.com/anthropics/hh-rlhf). Sourced from wikihow text lists created by b-mc2 (https://huggingface.co/datasets/b-mc2/wikihow_lists). And rejection filtered instruction response pairs generated by Chip20B that contained lists. 
All lists are formatted in a similar style. ### Follow-up questions (~12,500): Examples of instructions and responses where an appropriate response is to ask for more information from the prompter. These examples were generated from a combination of few shot prompted UL2 20B (to generate natural questions) and a large dialogue prompted language model to generate the responses containing follow-up questions. ### Wikipedia Toxic Adversarial Questions (~12,000): Questions and answers generated from wikipedia articles that discuss potentially sensitive topics (flagged as potentially toxic by an early toxicity detection model). ### Grade School Math GSM8K (~9,000): GSM8K is a dataset of 8.5K high quality linguistically diverse grade school math word problems created by human problem writers. The dataset is segmented into 7.5K training problems and 1K test problems. These problems take between 2 and 8 steps to solve, and solutions primarily involve performing a sequence of elementary calculations using basic arithmetic operations (+ − ×÷) to reach the final answer. A bright middle school student should be able to solve every problem. It can be used for multi-step mathematical reasoning. (https://github.com/openai/grade-school-math) ### Reasoning Instructions (~4,500): Examples from the Com2Sense and Strategy QA datasets that were reformatted into natural instructions using large language models with few shot prompting and additional quality filtering steps. ### Character and Scene Descriptions (~30,000): Examples of instructions and responses for the generation of character or scene descriptions. Scenes were sourced from video game wikis and reformatted into instruction / response format using large language models or generated by few shot prompting with large language models. ## Support this project Your contributions and feedback support the open source ecosystem, improve the bot and provide datasets for future AI research. 
To participate you can: Submit Github issues, track issues and help create datasets that need improvement. https://github.com/LAION-AI/Open-Instruction-Generalist Join our Discord to talk with other team members working on this! https://discord.gg/xBPBXfcFHd ## Update: March 20, 2023 - Added the metadata column to all datasets to alleviate issues with HF datasets loader. - Broke some of the p3 dialogs into parts for ease of loading. ## Disclaimer These datasets contain synthetic data and in some cases data that includes humans trying to get the language model to say toxic/offensive/trolling things. If you are concerned about the presence of this type of material in the dataset please make sure you carefully inspect each of the entries and filter appropriately. Our goal is for the model to be as helpful and non-toxic as possible and we are actively evaluating ways to reduce or eliminate undesirable content from the instruction tuning datasets. ## License The OIG dataset that is authored by LAION volunteers is released under an Apache 2.0 license. However, the data also includes content licensed under other permissive licenses such as Wikipedia data which is licensed under CC-BY-SA, or web-crawled data which is used under fair use principles. ## Acknowledgement - We would like to thank all of our amazing LAION volunteers including: @Rallio, @Jue, @Ce Zhang, @Player-1, @Laurel, @danielpatrickhug, @Jjmachan, @Mylo, @Khalid, @Coco.han, @Jordiclive, @Pszemraj, all volunteers from the Open Assistant project who initially created synthetic data, and many others. - We would like to thank Together for their tireless dedication to the open source and AI community and their contribution to many of the datasets. - We would like to thank AI Horde and user @Db0 for their incredible contribution of filtered data that were flagged as unethical. - Please check out our related project: https://github.com/LAION-AI/Open-Assistant for our work in human feedback gathering and RLHF. 
- Lastly, Ontocord.ai’s founders are grateful to have the opportunity to create a portion of the data augmentation and safety-moderation code for this project.
[ -0.4249058961868286, -0.7206603288650513, 0.19316458702087402, -0.05367501825094223, 0.00277087208814919, -0.14051061868667603, -0.15677762031555176, -0.46213483810424805, 0.06428290158510208, 0.47648563981056213, -0.5080980658531189, -0.5694411396980286, -0.20558710396289825, 0.0476791188...
null
null
null
null
null
null
null
null
null
null
null
null
null
medalpaca/medical_meadow_wikidoc_patient_information
medalpaca
2023-04-06T17:08:53Z
138
7
null
[ "task_categories:question-answering", "language:en", "license:cc", "region:us" ]
2023-04-06T17:08:53Z
2023-04-06T17:05:50.000Z
2023-04-06T17:05:50
--- license: cc task_categories: - question-answering language: - en --- # Dataset Card for WikiDoc For the dataset containing rephrased content from the living textbook refer to [this dataset](https://huggingface.co/datasets/medalpaca/medical_meadow_wikidoc) ## Dataset Description - **Source:** https://www.wikidoc.org/index.php/Main_Page - **Repository:** https://github.com/kbressem/medalpaca - **Paper:** TBA ### Dataset Summary This dataset containes medical question-answer pairs extracted from [WikiDoc](https://www.wikidoc.org/index.php/Main_Page), a collaborative platform for medical professionals to share and contribute to up-to-date medical knowledge. The platform has to main subsites, the "Living Textbook" and "Patient Information". The "Living Textbook" contains chapters for various medical specialties, which we crawled. We then used GTP-3.5-Turbo to rephrase the paragraph heading to a question and used the paragraph as answer. Patient Information is structured differently, in that each section subheading is already a question, making rephrasing them obsolete. **Note:** This dataset is still a WIP. While the Q/A pairs from the patient information seems to be mostly correct, the conversion using GPT-3.5-Turbo yielded some unsatisfactory results in approximately 30% of cases. We are in the process of cleaning this dataset. ### Citation Information TBA
[ -0.27475184202194214, -0.6939350366592407, 0.41736704111099243, -0.058765459805727005, -0.41571322083473206, -0.13186027109622955, -0.0009311123285442591, -0.2604008913040161, 0.4950057566165924, 0.5671190619468689, -0.7238616347312927, -0.6643248796463013, -0.36824312806129456, 0.25315475...
null
null
null
null
null
null
null
null
null
null
null
null
null
jacobthebanana/sst5_mapped_grouped
jacobthebanana
2023-04-18T19:54:38Z
138
0
null
[ "region:us" ]
2023-04-18T19:54:38Z
2023-04-17T16:59:03.000Z
2023-04-17T16:59:03
--- dataset_info: features: - name: label dtype: int64 - name: text dtype: string splits: - name: train num_bytes: 979714 num_examples: 8544 - name: test num_bytes: 253524 num_examples: 2210 - name: validation num_bytes: 127148 num_examples: 1101 download_size: 890192 dataset_size: 1360386 --- # Dataset Card for "sst5_mapped_grouped" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.45703357458114624, -0.04392174631357193, 0.36461594700813293, 0.299039363861084, -0.4404662549495697, 0.1867026686668396, 0.28555864095687866, -0.040769025683403015, 0.6724216341972351, 0.4315474331378937, -0.8307151198387146, -1.0997470617294312, -0.6363587379455566, -0.270484566688537...
null
null
null
null
null
null
null
null
null
null
null
null
null
sarus-tech/phee
sarus-tech
2023-06-21T19:36:26Z
138
1
null
[ "arxiv:2210.12560", "region:us" ]
2023-06-21T19:36:26Z
2023-06-21T14:49:19.000Z
2023-06-21T14:49:19
# PHEE dataset This dataset is port of https://github.com/ZhaoyueSun/PHEE, the data used in: [``PHEE: A Dataset for Pharmacovigilance Event Extraction from Text``](https://arxiv.org/abs/2210.12560/)
[ 0.11203530430793762, -0.3198035955429077, 0.504366397857666, -0.0852811187505722, -0.14096997678279877, -0.2989080250263214, 0.2379153072834015, -0.24214321374893188, 0.463268518447876, 0.7659720778465271, -0.5927871465682983, -0.537870466709137, -0.4398626387119293, -0.0486893430352211, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
rokset3/slimpajama
rokset3
2023-10-12T23:12:39Z
138
1
null
[ "region:us" ]
2023-10-12T23:12:39Z
2023-10-12T22:48:18.000Z
2023-10-12T22:48:18
--- dataset_info: features: - name: text dtype: string - name: meta struct: - name: redpajama_set_name dtype: string splits: - name: train num_bytes: 23874206724 num_examples: 5489000 download_size: 13962151299 dataset_size: 23874206724 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "slimpajama" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6284350156784058, -0.3072098195552826, 0.3158193528652191, 0.19236238300800323, -0.16385191679000854, -0.09765183180570602, 0.04793228954076767, -0.1144954189658165, 1.204106092453003, 0.6246643662452698, -0.9564523100852966, -0.5864830613136292, -0.7399389743804932, -0.3055359125137329...
null
null
null
null
null
null
null
null
null
null
null
null
null
sordonia/qa-flat-mmlu
sordonia
2023-11-24T00:28:02Z
138
0
null
[ "region:us" ]
2023-11-24T00:28:02Z
2023-11-23T16:18:09.000Z
2023-11-23T16:18:09
## model_setting: openai_batched ## max_context_length: 128 ## max_tokens_instruction: 2056 ## max_tokens_response: 2056 ## top_p: 0.9 ## num_iterations: 1 ## temperature: 0.7 ## max_documents_per_subject: 100 ## max_contexts_per_subject: -1 ## icl_examples: 0 ## icl_dataset: lukaemon/mmlu ## icl_split: validation ## icl_use_options: True
[ -0.6014998555183411, -0.3414539098739624, 0.13523738086223602, 0.6416910290718079, -0.6276847124099731, -0.3068450391292572, -0.11076285690069199, 0.1100761815905571, -0.07013699412345886, 0.3646015524864197, -0.7539013624191284, -0.634154200553894, -0.6475363969802856, 0.03531904518604278...
null
null
null
null
null
null
null
null
null
null
null
null
null
Aniemore/cedr-m7
Aniemore
2022-07-01T16:39:56Z
137
5
null
[ "task_categories:text-classification", "task_ids:sentiment-classification", "annotations_creators:found", "language_creators:found", "multilinguality:monolingual", "size_categories:1K<n<10K", "source_datasets:extended|cedr", "language:ru", "license:mit", "region:us" ]
2022-07-01T16:39:56Z
2022-05-24T18:01:54.000Z
2022-05-24T18:01:54
--- annotations_creators: - found language_creators: - found language: - ru license: mit multilinguality: - monolingual pretty_name: cedr-m7 size_categories: - 1K<n<10K source_datasets: - extended|cedr task_categories: - text-classification task_ids: - sentiment-classification --- # Dataset Card for CEDR-M7 ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary [More Information Needed] ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? 
[More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information ``` @misc{Aniemore, author = {Артем Аментес, Илья Лубенец, Никита Давидчук}, title = {Открытая библиотека искусственного интеллекта для анализа и выявления эмоциональных оттенков речи человека}, year = {2022}, publisher = {Hugging Face}, journal = {Hugging Face Hub}, howpublished = {\url{https://huggingface.com/aniemore/Aniemore}}, email = {hello@socialcode.ru} } ``` ### Contributions Thanks to [@toiletsandpaper](https://github.com/toiletsandpaper) for adding this dataset.
[ -0.5176723599433899, -0.5588390231132507, 0.24882417917251587, 0.17270053923130035, -0.31793954968452454, 0.12487272918224335, -0.3022167980670929, -0.39368438720703125, 0.6349302530288696, 0.46805456280708313, -0.9181723594665527, -1.2073160409927368, -0.7479285001754761, 0.17620918154716...
null
null
null
null
null
null
null
null
null
null
null
null
null
SetFit/CR
SetFit
2022-06-21T09:04:33Z
137
0
null
[ "region:us" ]
2022-06-21T09:04:33Z
2022-06-10T14:30:21.000Z
2022-06-10T14:30:21
# Customer Reviews This dataset is a port of the official [`CR` dataset](https://github.com/hiyouga/Dual-Contrastive-Learning/tree/main/data) from [this paper](https://www.cs.uic.edu/~liub/FBS/opinion-mining-final-WSDM.pdf). There is no validation split.
[ -0.499041348695755, -0.374557763338089, 0.3815555274486542, -0.13114085793495178, -0.5493499636650085, 0.2855845093727112, -0.11218041181564331, -0.4085918664932251, 0.2924402058124542, 0.7481019496917725, -0.870404839515686, -0.6768459677696228, -0.2638503909111023, -0.09291849285364151, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-100000-150000
tomekkorbak
2022-10-06T02:58:25Z
137
0
null
[ "region:us" ]
2022-10-06T02:58:25Z
2022-10-03T19:41:55.000Z
2022-10-03T19:41:55
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
mattmdjaga/human_parsing_dataset
mattmdjaga
2023-09-11T09:07:44Z
137
11
null
[ "task_categories:image-segmentation", "task_ids:semantic-segmentation", "size_categories:10K<n<100K", "region:us" ]
2023-09-11T09:07:44Z
2023-03-30T17:59:37.000Z
2023-03-30T17:59:37
--- size_categories: - 10K<n<100K task_categories: - image-segmentation task_ids: - semantic-segmentation dataset_info: features: - name: image dtype: image - name: mask dtype: image splits: - name: train num_bytes: 5892290030.116 num_examples: 17706 download_size: 5893438158 dataset_size: 5892290030.116 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for Human parsing data (ATR) ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** - **Repository:** - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary This dataset has 17,706 images and mask pairs. It is just a copy of [Deep Human Parsing](https://github.com/lemondan/HumanParsing-Dataset) ATR dataset. 
The mask labels are: "0": "Background", "1": "Hat", "2": "Hair", "3": "Sunglasses", "4": "Upper-clothes", "5": "Skirt", "6": "Pants", "7": "Dress", "8": "Belt", "9": "Left-shoe", "10": "Right-shoe", "11": "Face", "12": "Left-leg", "13": "Right-leg", "14": "Left-arm", "15": "Right-arm", "16": "Bag", "17": "Scarf" ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions ```bibtex @ARTICLE{ATR, author={Xiaodan Liang and Si Liu and Xiaohui Shen and Jianchao Yang and Luoqi Liu and Jian Dong and Liang Lin and Shuicheng Yan}, journal={Pattern Analysis and Machine Intelligence, IEEE Transactions on}, title={Deep Human Parsing with Active Template Regression}, year={2015}, volume={37}, number={12}, pages={2402-2414}, doi={10.1109/TPAMI.2015.2408360}, ISSN={0162-8828}, month={Dec}} @InProceedings{CO-CNN, author={Xiaodan Liang and Chunyan Xu and Xiaohui Shen and Jianchao Yang and Si Liu and Jinhui Tang and Liang Lin and Shuicheng Yan}, journal ={Pattern Analysis and Machine Intelligence, IEEE 
Transactions on}, title={ICCV}, year={2015}, } ```
[ -0.5203158855438232, -0.5842207670211792, 0.13769356906414032, 0.17407040297985077, -0.2343199998140335, 0.34029653668403625, -0.31155452132225037, -0.4754440486431122, 0.31083497405052185, 0.4698006808757782, -0.6513220071792603, -1.0679960250854492, -0.5385518074035645, 0.322826474905014...
null
null
null
null
null
null
null
null
null
null
null
null
null
llm-book/jawiki-sentences
llm-book
2023-10-25T15:22:05Z
137
1
null
[ "size_categories:10M<n<100M", "language:ja", "license:cc-by-sa-3.0", "license:gfdl", "region:us" ]
2023-10-25T15:22:05Z
2023-06-03T03:02:08.000Z
2023-06-03T03:02:08
--- language: - ja size_categories: - 10M<n<100M license: - cc-by-sa-3.0 - gfdl dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 3569619848 num_examples: 24387500 download_size: 1297833377 dataset_size: 3569619848 --- # Dataset Card for llm-book/jawiki-sentences 書籍『大規模言語モデル入門』で使用する Wikipedia 文のデータセットです。 GitHub リポジトリ [singletongue/wikipedia-utils](https://github.com/singletongue/wikipedia-utils) で公開されているデータセットを利用しています。 ## Licence 本データセットで使用している Wikipedia のコンテンツは、[クリエイティブ・コモンズ表示・継承ライセンス 3.0 (CC BY-SA 3.0)](https://creativecommons.org/licenses/by-sa/3.0/deed.ja) および [GNU 自由文書ライセンス (GFDL)](https://www.gnu.org/licenses/fdl.html) の下に配布されているものです。
[ -0.45974403619766235, -0.9064616560935974, 0.28195658326148987, 0.047713279724121094, -0.9304814338684082, -0.3115420341491699, -0.23922909796237946, -0.06679344177246094, 0.2646706998348236, 0.6046263575553894, -0.8757546544075012, -1.0404618978500366, -0.20121333003044128, 0.215398013591...
null
null
null
null
null
null
null
null
null
null
null
null
null
ai4privacy/pii-masking-65k
ai4privacy
2023-08-27T04:42:54Z
137
14
null
[ "size_categories:10K<n<100K", "language:en", "language:fr", "language:de", "language:it", "legal", "business", "psychology", "privacy", "region:us" ]
2023-08-27T04:42:54Z
2023-08-07T06:04:08.000Z
2023-08-07T06:04:08
--- language: - en - fr - de - it tags: - legal - business - psychology - privacy size_categories: - 10K<n<100K --- # Purpose and Features The purpose of the model and dataset is to remove personally identifiable information (PII) from text, especially in the context of AI assistants and LLMs. The model is a fine-tuned version of "Distilled BERT", a smaller and faster version of BERT. It was adapted for the task of token classification based on the largest to our knowledge open-source PII masking dataset, which we are releasing simultaneously. The model size is 62 million parameters. The original encoding of the parameters yields a model size of 268 MB, which is compressed to 43MB after parameter quantization. The models are available in PyTorch, tensorflow, and tensorflow.js The dataset is composed of ~43’000 observations. Each row starts with a natural language sentence that includes placeholders for PII and could plausibly be written to an AI assistant. The placeholders are then filled in with mocked personal information and tokenized with the BERT tokenizer. We label the tokens that correspond to PII, serving as the ground truth to train our model. The dataset covers a range of contexts in which PII can appear. The sentences span 58 sensitive data types (~117 token classes), targeting **125 discussion subjects / use cases** split across business, psychology and legal fields, and 5 interactions styles (e.g. casual conversation, formal document, emails etc...). Key facts: - Currently 5.6m tokens with 65k PII examples. 
- Multiple languages - Human-in-the-loop validated high quality dataset - Synthetic data generated using proprietary algorithms - Adapted from DistilBertForTokenClassification - Framework PyTorch - 8 bit quantization # Token distribution across PII classes There are 2 dataset releasees: - Original release: - [PII43k_original.jsonl](PII43k_original.jsonl) - New release with balanced token distribution: - [english_balanced_10k.jsonl](english_balanced_10k.jsonl) - [french_balanced_5k.jsonl](french_balanced_5k.jsonl) - [german_balanced_3k.jsonl](german_balanced_3k.jsonl) - [italian_balanced_3k.jsonl](italian_balanced_3k.jsonl) The new release **balances the distribution of tokens across the PII classes** covered by the dataset. This graph shows the distribution of observations across the different PII classes in the new release: ![Balanced token distribution on new release](ai4priv-distribution_including_all_languages.png) This is an important improvement, because the old release focused on just a few classes of PII and didn't provide enough examples of the other ones. 
This graph shows the unbalanced distribution of observations across the different PII classes in the old release: ![Unbalanced token distribution on old release]( ai4priv-distribution_old.png) Current counts of tokens per example: ![Current counts of tokens per example](token_count_per_sentence.png) # Performance evaluation | Test Precision | Test Recall | Test Accuracy | |:-:|:-:|:-:| # Community Engagement: Newsletter & updates: www.Ai4privacy.com - Looking for ML engineers, developers, beta-testers, human in the loop validators (all languages) - Integrations with already existing open source solutions # Roadmap and Future Development - Multilingual benchmarking - Extended integrations - Continuously increase the training set - Further optimisation to the model to reduce size and increase generalisability - Next released major update is planned for the 14th of July (subscribe to newsletter for updates) # Use Cases and Applications **Chatbots**: Incorporating a PII masking model into chatbot systems can ensure the privacy and security of user conversations by automatically redacting sensitive information such as names, addresses, phone numbers, and email addresses. **Customer Support Systems**: When interacting with customers through support tickets or live chats, masking PII can help protect sensitive customer data, enabling support agents to handle inquiries without the risk of exposing personal information. **Email Filtering**: Email providers can utilize a PII masking model to automatically detect and redact PII from incoming and outgoing emails, reducing the chances of accidental disclosure of sensitive information. **Data Anonymization**: Organizations dealing with large datasets containing PII, such as medical or financial records, can leverage a PII masking model to anonymize the data before sharing it for research, analysis, or collaboration purposes. 
**Social Media Platforms**: Integrating PII masking capabilities into social media platforms can help users protect their personal information from unauthorized access, ensuring a safer online environment. **Content Moderation**: PII masking can assist content moderation systems in automatically detecting and blurring or redacting sensitive information in user-generated content, preventing the accidental sharing of personal details. **Online Forms**: Web applications that collect user data through online forms, such as registration forms or surveys, can employ a PII masking model to anonymize or mask the collected information in real-time, enhancing privacy and data protection. **Collaborative Document Editing**: Collaboration platforms and document editing tools can use a PII masking model to automatically mask or redact sensitive information when multiple users are working on shared documents. **Research and Data Sharing**: Researchers and institutions can leverage a PII masking model to ensure privacy and confidentiality when sharing datasets for collaboration, analysis, or publication purposes, reducing the risk of data breaches or identity theft. **Content Generation**: Content generation systems, such as article generators or language models, can benefit from PII masking to automatically mask or generate fictional PII when creating sample texts or examples, safeguarding the privacy of individuals. (...and whatever else your creative mind can think of) # Support and Maintenance AI4Privacy is a project affiliated with [AISuisse SA](https://www.aisuisse.com/).
[ -0.5932114124298096, -0.7279366254806519, 0.03925907611846924, 0.32937145233154297, -0.08072075992822647, 0.1388585865497589, -0.006447086576372385, -0.7395409345626831, 0.037238869816064835, 0.47831493616104126, -0.3757311999797821, -0.5131343603134155, -0.4422394633293152, 0.035780791193...
null
null
null
null
null
null
null
null
null
null
null
null
null
EleutherAI/coqa
EleutherAI
2023-11-02T14:46:15Z
137
1
null
[ "size_categories:1K<n<10K", "language:en", "license:other", "arxiv:1808.07042", "region:us" ]
2023-11-02T14:46:15Z
2023-08-30T10:34:59.000Z
2023-08-30T10:34:59
--- license: other language: - en size_categories: - 1K<n<10K --- """CoQA dataset. This `CoQA` adds the "additional_answers" feature that's missing in the original datasets version: https://github.com/huggingface/datasets/blob/master/datasets/coqa/coqa.py """ _CITATION = """\ @misc{reddy2018coqa, title={CoQA: A Conversational Question Answering Challenge}, author={Siva Reddy and Danqi Chen and Christopher D. Manning}, year={2018}, eprint={1808.07042}, archivePrefix={arXiv}, primaryClass={cs.CL} } """ _DESCRIPTION = """\ CoQA is a large-scale dataset for building Conversational Question Answering systems. The goal of the CoQA challenge is to measure the ability of machines to understand a text passage and answer a series of interconnected questions that appear in a conversation. """ _HOMEPAGE = "https://stanfordnlp.github.io/coqa/" _LICENSE = "Different licenses depending on the content (see https://stanfordnlp.github.io/coqa/ for details)"
[ -0.7158405780792236, -0.8015375733375549, 0.16362401843070984, 0.04056734964251518, 0.10188169032335281, 0.19889287650585175, 0.06686446815729141, -0.28058168292045593, 0.3206726312637329, 0.6201709508895874, -1.1766916513442993, -0.361856609582901, -0.26057547330856323, 0.219291552901268,...
null
null
null
null
null
null
null
null
null
null
null
null
null
Lakera/gandalf_ignore_instructions
Lakera
2023-10-02T09:26:29Z
137
6
null
[ "size_categories:1K<n<10K", "language:en", "license:mit", "prompt injection", "region:us" ]
2023-10-02T09:26:29Z
2023-09-21T08:49:47.000Z
2023-09-21T08:49:47
--- language: - en license: mit size_categories: - 1K<n<10K dataset_info: features: - name: text dtype: string - name: similarity dtype: float64 splits: - name: train num_bytes: 66400 num_examples: 777 - name: validation num_bytes: 9633 num_examples: 111 - name: test num_bytes: 9747 num_examples: 112 download_size: 51515 dataset_size: 85780 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* tags: - prompt injection --- # gandalf_ignore_instructions ![](https://gandalf.lakera.ai/level-images/word-blacklist.jpg) This is a dataset of prompt injections from [Gandalf](https://gandalf.lakera.ai/) by [Lakera](https://www.lakera.ai/). Note that we might update the dataset occasionally by cleaning the data or adding more samples. ## How the data was obtained There are millions of prompts and many of them are not actual prompt injections (people ask Gandalf all kinds of things). We used the following process to obtain relevant data: - Start with all prompts submitted to Gandalf in July 2023. - Use [OpenAI text embeddings](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings) (`text-embedding-ada-002`) to compute the similarity between the prompts and the phrase "Ignore all previous instructions". - Filter to those prompts that have similarity of at least 0.825. - Use [Lakera Guard](https://platform.lakera.ai/docs)'s personally identifiable information detector to filter out prompts that potentially contain PII data. - Remove near-duplicates from the data (prompts that differ only by a few letters) using an approximate algorithm. This helps reduce leakage between the data splits. - Sample 1000 prompts. - Split the data into train-val-test with an 80/10/10 ratio. Each sample is assigned independently so the size of the train split is not _exactly_ 80% and so on. 
Note that there is a small amount of noise in the data since an automatic method was used to obtain it: a few of the samples might not be real prompt injections. ## Citation If you use this dataset in your research, please cite it as ``` @InProceedings{gandalf_ignore_instructions, title = {gandalf_ignore_instructions}, author={Lakera AI (https://www.lakera.ai)}, year={2023} } ``` ## Licensing Information gandalf_ignore_instructions is distributed under the [MIT License](https://opensource.org/license/mit/).
[ -0.4702165722846985, -1.0344364643096924, 0.5949540734291077, 0.1205630898475647, -0.10837491601705551, -0.3579719066619873, 0.17210792005062103, -0.20149974524974823, 0.04211597517132759, 0.5629984140396118, -0.5675445795059204, -0.6341792941093445, -0.6582199931144714, 0.0887946859002113...
null
null
null
null
null
null
null
null
null
null
null
null
null
mmnga/wikipedia-ja-20230720-2k
mmnga
2023-09-25T08:20:29Z
137
0
null
[ "region:us" ]
2023-09-25T08:20:29Z
2023-09-25T07:51:08.000Z
2023-09-25T07:51:08
--- dataset_info: features: - name: curid dtype: string - name: title dtype: string - name: text dtype: string splits: - name: train num_bytes: 5492016.948562663 num_examples: 2048 download_size: 3161030 dataset_size: 5492016.948562663 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "wikipedia-ja-20230720-2k" This is data extracted randomly from [izumi-lab/wikipedia-ja-20230720](https://huggingface.co/datasets/izumi-lab/wikipedia-ja-20230720), consisting of 2,048 records. [izumi-lab/wikipedia-ja-20230720](https://huggingface.co/datasets/izumi-lab/wikipedia-ja-20230720)からデータを2k分ランダムに抽出したデータです。 [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.434452623128891, -0.5310489535331726, 0.2806648910045624, 0.1449795663356781, -0.5380985736846924, -0.28658631443977356, 0.056948430836200714, -0.5406312942504883, 0.6763958930969238, 0.3767087459564209, -0.9048156142234802, -0.5649575591087341, -0.5966179966926575, -0.00492895580828189...
null
null
null
null
null
null
null
null
null
null
null
null
null
Coldog2333/bioasq
Coldog2333
2023-10-13T03:52:38Z
137
1
null
[ "region:us" ]
2023-10-13T03:52:38Z
2023-09-29T02:55:35.000Z
2023-09-29T02:55:35
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
victorcosta/eu_directives
victorcosta
2023-11-22T19:19:07Z
137
0
null
[ "language:pt", "region:us" ]
2023-11-22T19:19:07Z
2023-11-15T20:39:49.000Z
2023-11-15T20:39:49
--- language: pt configs: - config_name: ria_ground_truth data_files: ria_ground_truth/train.parquet version: 1.0.2 --- # European Union's Directives dataset (PT)
[ -0.2556181252002716, -0.12848955392837524, -0.08576464653015137, 0.6695244908332825, -0.6705873608589172, -0.24052031338214874, 0.15717308223247528, 0.5681394338607788, 0.0523412823677063, 0.7309942841529846, -0.731224000453949, -1.0593181848526, -0.675445020198822, 0.15551447868347168, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
yangdong/ecqa
yangdong
2022-03-16T14:14:41Z
136
0
null
[ "region:us" ]
2022-03-16T14:14:41Z
2022-03-16T14:14:29.000Z
2022-03-16T14:14:29
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
chainyo/rvl-cdip-invoice
chainyo
2022-04-06T16:57:20Z
136
4
null
[ "license:other", "region:us" ]
2022-04-06T16:57:20Z
2022-04-06T16:52:14.000Z
2022-04-06T16:52:14
--- license: other --- ⚠️ This only a subpart of the original dataset, containing only `invoice`. The RVL-CDIP (Ryerson Vision Lab Complex Document Information Processing) dataset consists of 400,000 grayscale images in 16 classes, with 25,000 images per class. There are 320,000 training images, 40,000 validation images, and 40,000 test images. The images are sized so their largest dimension does not exceed 1000 pixels. For questions and comments please contact Adam Harley (aharley@scs.ryerson.ca). The full dataset can be found [here](https://www.cs.cmu.edu/~aharley/rvl-cdip/). ## Labels 0: letter 1: form 2: email 3: handwritten 4: advertissement 5: scientific report 6: scientific publication 7: specification 8: file folder 9: news article 10: budget 11: invoice 12: presentation 13: questionnaire 14: resume 15: memo ## Citation This dataset is from this [paper](https://www.cs.cmu.edu/~aharley/icdar15/) `A. W. Harley, A. Ufkes, K. G. Derpanis, "Evaluation of Deep Convolutional Nets for Document Image Classification and Retrieval," in ICDAR, 2015` ## License RVL-CDIP is a subset of IIT-CDIP, which came from the [Legacy Tobacco Document Library](https://www.industrydocuments.ucsf.edu/tobacco/), for which license information can be found [here](https://www.industrydocuments.ucsf.edu/help/copyright/). ## References 1. D. Lewis, G. Agam, S. Argamon, O. Frieder, D. Grossman, and J. Heard, "Building a test collection for complex document information processing," in Proc. 29th Annual Int. ACM SIGIR Conference (SIGIR 2006), pp. 665-666, 2006 2. The Legacy Tobacco Document Library (LTDL), University of California, San Francisco, 2007. http://legacy.library.ucsf.edu/.
[ -0.33544784784317017, -0.2620612680912018, 0.5110528469085693, 0.05555447190999985, -0.05596165359020233, 0.0334903821349144, -0.10222005099058151, -0.48079371452331543, -0.42318955063819885, 0.7423476576805115, -0.27241939306259155, -0.672511100769043, -0.6671744585037231, -0.052368152886...
null
null
null
null
null
null
null
null
null
null
null
null
null
SetFit/amazon_reviews_multi_en
SetFit
2022-04-13T19:06:11Z
136
1
null
[ "license:apache-2.0", "region:us" ]
2022-04-13T19:06:11Z
2022-04-13T18:39:28.000Z
2022-04-13T18:39:28
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
bigbio/drugprot
bigbio
2023-01-06T03:30:02Z
136
2
null
[ "multilinguality:monolingual", "language:en", "license:cc-by-4.0", "region:us" ]
2023-01-06T03:30:02Z
2023-01-06T03:27:49.000Z
2023-01-06T03:27:49
--- language: - en bigbio_language: - English license: cc-by-4.0 multilinguality: monolingual bigbio_license_shortname: CC_BY_4p0 pretty_name: DrugProt homepage: https://biocreative.bioinformatics.udel.edu/tasks/biocreative-vii/track-1/ bigbio_pubmed: True bigbio_public: True bigbio_tasks: - NAMED_ENTITY_RECOGNITION - RELATION_EXTRACTION --- # Dataset Card for DrugProt ## Dataset Description - **Homepage:** https://biocreative.bioinformatics.udel.edu/tasks/biocreative-vii/track-1/ - **Pubmed:** True - **Public:** True - **Tasks:** NER,RE The DrugProt corpus consists of a) expert-labelled chemical and gene mentions, and (b) all binary relationships between them corresponding to a specific set of biologically relevant relation types. The corpus was introduced in context of the BioCreative VII Track 1 (Text mining drug and chemical-protein interactions). ## Citation Information ``` @inproceedings{miranda2021overview, title={Overview of DrugProt BioCreative VII track: quality evaluation and large scale text mining of \ drug-gene/protein relations}, author={Miranda, Antonio and Mehryary, Farrokh and Luoma, Jouni and Pyysalo, Sampo and Valencia, Alfonso \ and Krallinger, Martin}, booktitle={Proceedings of the seventh BioCreative challenge evaluation workshop}, year={2021} } ```
[ -0.03676999360322952, -0.44675925374031067, 0.5641564726829529, 0.16951899230480194, -0.14491328597068787, 0.08114482462406158, -0.021932657808065414, -0.2755427956581116, 0.5200904011726379, 0.40400826930999756, -0.49871721863746643, -0.855901300907135, -0.8476181030273438, 0.383038252592...
null
null
null
null
null
null
null
null
null
null
null
null
null
jonathan-roberts1/PatternNet
jonathan-roberts1
2023-03-31T17:06:42Z
136
0
null
[ "task_categories:image-classification", "task_categories:zero-shot-image-classification", "license:other", "region:us" ]
2023-03-31T17:06:42Z
2023-01-27T12:46:23.000Z
2023-01-27T12:46:23
--- dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': airplane '1': baseball field '2': basketball court '3': beach '4': bridge '5': cemetery '6': chaparral '7': christmas tree farm '8': closed road '9': coastal mansion '10': crosswalk '11': dense residential '12': ferry terminal '13': football field '14': forest '15': freeway '16': golf course '17': harbor '18': intersection '19': mobile home park '20': nursing home '21': oil gas field '22': oil well '23': overpass '24': parking lot '25': parking space '26': railway '27': river '28': runway '29': runway marking '30': shipping yard '31': solar panel '32': sparse residential '33': storage tank '34': swimming pool '35': tennis court '36': transformer station '37': wastewater treatment plant splits: - name: train num_bytes: 821222673.6 num_examples: 30400 download_size: 1422129774 dataset_size: 821222673.6 license: other task_categories: - image-classification - zero-shot-image-classification --- # Dataset Card for "PatternNet" ## Dataset Description - **Paper** [PatternNet: A benchmark dataset for performance evaluation of remote sensing image retrieval](https://www.sciencedirect.com/science/article/pii/S0924271618300042) ### Licensing Information For research purposes. ## Citation Information [PatternNet: A benchmark dataset for performance evaluation of remote sensing image retrieval](https://www.sciencedirect.com/science/article/pii/S0924271618300042) ``` @article{zhou2018patternnet, title = {PatternNet: A benchmark dataset for performance evaluation of remote sensing image retrieval}, author = {Zhou, Weixun and Newsam, Shawn and Li, Congmin and Shao, Zhenfeng}, year = 2018, journal = {ISPRS journal of photogrammetry and remote sensing}, publisher = {Elsevier}, volume = 145, pages = {197--209} } ```
[ -0.19561772048473358, 0.04934734106063843, 0.08476961404085159, 0.35957252979278564, -0.7960823774337769, -0.22356124222278595, -0.02913844585418701, -0.31763362884521484, 0.14241252839565277, 0.3289839029312134, -0.2808372676372528, -0.7933439016342163, -0.4909054934978485, 0.001086131203...
null
null
null
null
null
null
null
null
null
null
null
null
null
FreedomIntelligence/alpaca-gpt4-korean
FreedomIntelligence
2023-08-06T08:10:43Z
136
1
null
[ "region:us" ]
2023-08-06T08:10:43Z
2023-06-26T08:18:44.000Z
2023-06-26T08:18:44
The dataset is used in the research related to [MultilingualSIFT](https://github.com/FreedomIntelligence/MultilingualSIFT).
[ -0.40414297580718994, -0.30496302247047424, -0.004310851916670799, 0.28069862723350525, -0.06412237882614136, 0.058366622775793076, -0.2765141725540161, -0.43211790919303894, 0.4121001660823822, 0.483478307723999, -0.9163603782653809, -0.4689486026763916, -0.18480102717876434, 0.3091475665...
null
null
null
null
null
null
null
null
null
null
null
null
null
misbah1955/pharma
misbah1955
2023-10-20T08:48:04Z
136
0
null
[ "license:cc-by-4.0", "region:us" ]
2023-10-20T08:48:04Z
2023-10-20T08:47:24.000Z
2023-10-20T08:47:24
--- license: cc-by-4.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
vikp/doclaynet_processed
vikp
2023-11-22T18:13:48Z
136
0
null
[ "region:us" ]
2023-11-22T18:13:48Z
2023-11-22T06:48:12.000Z
2023-11-22T06:48:12
--- dataset_info: features: - name: image dtype: image - name: bboxes sequence: sequence: float64 - name: labels sequence: int64 - name: words sequence: string - name: split dtype: string splits: - name: train num_bytes: 32034973965.125 num_examples: 80863 download_size: 0 dataset_size: 32034973965.125 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "doclaynet_processed" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.3504425883293152, -0.3962704539299011, 0.33875715732574463, 0.14884939789772034, -0.1279240995645523, -0.09316116571426392, 0.2083049863576889, -0.14567463099956512, 0.8894957304000854, 0.7542991042137146, -0.8869224190711975, -0.7607246041297913, -0.6895237565040588, -0.280615955591201...
null
null
null
null
null
null
null
null
null
null
null
null
null
Bingsu/Human_Action_Recognition
Bingsu
2022-07-05T02:48:56Z
135
7
null
[ "task_categories:image-classification", "size_categories:10K<n<100K", "source_datasets:original", "language:en", "license:odbl", "region:us" ]
2022-07-05T02:48:56Z
2022-06-09T02:00:52.000Z
2022-06-09T02:00:52
--- language: - en license: - odbl pretty_name: Human Action Recognition size_categories: - 10K<n<100K source_datasets: - original task_categories: - image-classification --- ## Dataset Description - **Homepage:** [Human Action Recognition (HAR) Dataset](https://www.kaggle.com/datasets/meetnagadia/human-action-recognition-har-dataset) - **Repository:** N/A - **Paper:** N/A - **Leaderboard:** N/A - **Point of Contact:** N/A ## Dataset Summary A dataset from [kaggle](https://www.kaggle.com/datasets/meetnagadia/human-action-recognition-har-dataset). origin: https://dphi.tech/challenges/data-sprint-76-human-activity-recognition/233/data ### Introduction - The dataset features 15 different classes of Human Activities. - The dataset contains about 12k+ labelled images including the validation images. - Each image has only one human activity category and are saved in separate folders of the labelled classes ### PROBLEM STATEMENT - Human Action Recognition (HAR) aims to understand human behavior and assign a label to each action. It has a wide range of applications, and therefore has been attracting increasing attention in the field of computer vision. Human actions can be represented using various data modalities, such as RGB, skeleton, depth, infrared, point cloud, event stream, audio, acceleration, radar, and WiFi signal, which encode different sources of useful yet distinct information and have various advantages depending on the application scenarios. - Consequently, lots of existing works have attempted to investigate different types of approaches for HAR using various modalities. - Your Task is to build an Image Classification Model using CNN that classifies to which class of activity a human is performing. ### About Files - Train - contains all the images that are to be used for training your model. 
In this folder you will find 15 folders namely - 'calling', ’clapping’, ’cycling’, ’dancing’, ‘drinking’, ‘eating’, ‘fighting’, ‘hugging’, ‘laughing’, ‘listeningtomusic’, ‘running’, ‘sitting’, ‘sleeping’, texting’, ‘using_laptop’ which contain the images of the respective human activities. - Test - contains 5400 images of Human Activities. For these images you are required to make predictions as the respective class names -'calling', ’clapping’, ’cycling’, ’dancing’, ‘drinking’, ‘eating’, ‘fighting’, ‘hugging’, ‘laughing’, ‘listeningtomusic’, ‘running’, ‘sitting’, ‘sleeping’, texting’, ‘using_laptop’. - Testing_set.csv - this is the order of the predictions for each image that is to be submitted on the platform. Make sure the predictions you download are with their image’s filename in the same order as given in this file. - sample_submission: This is a csv file that contains the sample submission for the data sprint. ### Data Fields The data instances have the following fields: - `image`: A `PIL.Image.Image` object containing the image. Note that when accessing the image column: `dataset[0]["image"]` the image file is automatically decoded. Decoding of a large number of image files might take a significant amount of time. Thus it is important to first query the sample index before the `"image"` column, *i.e.* `dataset[0]["image"]` should **always** be preferred over `dataset["image"][0]`. - `labels`: an `int` classification label. All `test` data is labeled 0. 
### Class Label Mappings: ``` { 'calling': 0, 'clapping': 1, 'cycling': 2, 'dancing': 3, 'drinking': 4, 'eating': 5, 'fighting': 6, 'hugging': 7, 'laughing': 8, 'listening_to_music': 9, 'running': 10, 'sitting': 11, 'sleeping': 12, 'texting': 13, 'using_laptop': 14 } ``` ### Data Splits | | train | test | |---------------|--------|-----:| | # of examples | 12600 | 5400 | ### Data Size - download: 311.96 MiB - generated: 312.59 MiB - total: 624.55 MiB ```pycon >>> from datasets import load_dataset >>> ds = load_dataset("Bingsu/Human_Action_Recognition") >>> ds DatasetDict({ test: Dataset({ features: ['image', 'labels'], num_rows: 5400 }) train: Dataset({ features: ['image', 'labels'], num_rows: 12600 }) }) >>> ds["train"].features {'image': Image(decode=True, id=None), 'labels': ClassLabel(num_classes=15, names=['calling', 'clapping', 'cycling', 'dancing', 'drinking', 'eating', 'fighting', 'hugging', 'laughing', 'listening_to_music', 'running', 'sitting', 'sleeping', 'texting', 'using_laptop'], id=None)} >>> ds["train"][0] {'image': <PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=240x160>, 'labels': 11} ```
[ -0.32362037897109985, -0.3341231942176819, -0.201058030128479, 0.15189357101917267, -0.23184728622436523, -0.031635746359825134, -0.0422435961663723, -0.5530625581741333, 0.190934419631958, 0.29948458075523376, -0.49721336364746094, -0.6487072706222534, -0.6514677405357361, -0.014337672851...
null
null
null
null
null
null
null
null
null
null
null
null
null
gokuls/wiki_book_corpus_complete_processed_bert_dataset
gokuls
2023-02-25T19:22:14Z
135
0
null
[ "region:us" ]
2023-02-25T19:22:14Z
2023-02-25T07:22:50.000Z
2023-02-25T07:22:50
--- dataset_info: features: - name: input_ids sequence: int32 - name: token_type_ids sequence: int8 - name: attention_mask sequence: int8 - name: special_tokens_mask sequence: int8 splits: - name: train num_bytes: 22201610400.0 num_examples: 6167114 download_size: 2763194793 dataset_size: 22201610400.0 --- # Dataset Card for "wiki_book_corpus_complete_processed_bert_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5744206309318542, -0.3089395761489868, 0.20626167953014374, 0.15826350450515747, -0.20579653978347778, -0.05786469206213951, -0.18870684504508972, -0.2481960952281952, 0.7573025226593018, 0.5010349750518799, -0.7967408299446106, -0.8194096088409424, -0.4822806119918823, -0.1430010795593...
null
null
null
null
null
null
null
null
null
null
null
null
null
soymia/boudoir-dataset
soymia
2023-03-01T10:39:34Z
135
3
null
[ "task_categories:text-to-image", "size_categories:1K<n<10K", "license:apache-2.0", "region:us" ]
2023-03-01T10:39:34Z
2023-02-25T09:36:56.000Z
2023-02-25T09:36:56
--- dataset_info: features: - name: image dtype: image - name: text dtype: string splits: - name: train num_bytes: 96479861.365 num_examples: 1055 download_size: 95036573 dataset_size: 96479861.365 license: apache-2.0 task_categories: - text-to-image pretty_name: Boudoir Dataset size_categories: - 1K<n<10K --- # Dataset Card for "boudoir-dataset" ### Dataset Summary Images scrapped from selected Galleries on Behance.
[ -0.3683275282382965, -0.3166765570640564, -0.1667417585849762, 0.056895893067121506, -0.8755962252616882, -0.12211466580629349, 0.3251408636569977, 0.02314465492963791, 0.5884213447570801, 1.0028718709945679, -0.9033691883087158, -0.7777344584465027, -0.46734294295310974, -0.06276053190231...
null
null
null
null
null
null
null
null
null
null
null
null
null
BramVanroy/alpaca-cleaned-dutch
BramVanroy
2023-07-07T12:16:39Z
135
1
null
[ "task_categories:question-answering", "task_categories:text-generation", "size_categories:10K<n<100K", "language:nl", "license:cc-by-nc-4.0", "alpaca", "instruct", "instruction", "doi:10.57967/hf/0530", "region:us" ]
2023-07-07T12:16:39Z
2023-04-12T07:02:22.000Z
2023-04-12T07:02:22
--- license: cc-by-nc-4.0 task_categories: - question-answering - text-generation language: - nl tags: - alpaca - instruct - instruction pretty_name: Alpaca Cleaned Dutch size_categories: - 10K<n<100K --- # Dataset Card for Alpaca Cleaned Dutch ## Dataset Description - **Homepage:** N/A - **Repository:** N/A - **Paper:** N/A - **Leaderboard:** N/A - **Point of Contact:** Bram Vanroy ### Dataset Summary This dataset contains 51,712 conversations between een AI assistant and a (fake) "Human" (generated) in Dutch. They are translations of [Alpaca Cleaned Dataset](https://huggingface.co/datasets/yahma/alpaca-cleaned). ☕ [**Want to help me out?**](https://www.buymeacoffee.com/bramvanroy) Translating the data with the OpenAI API, and prompt testing, cost me 💸$57.99💸. If you like this dataset, please consider [buying me a coffee](https://www.buymeacoffee.com/bramvanroy) to offset a portion of this cost, I appreciate it a lot! ☕ ### Languages - Dutch ## Dataset Structure ### Data Instances ```python { 'id': 7, 'instruction': 'Leg uit waarom de volgende breuk gelijk is aan 1/4', 'input': '4/16', 'output': 'De breuk 4/16 is gelijk aan 1/4 omdat zowel de teller als de ' 'noemer deelbaar zijn door 4. Door zowel de teller als de noemer ' 'door 4 te delen, krijgen we de breuk 1/4.' } ``` ### Data Fields - **id**: the ID of the item. The following ID is not included because they could not be translated: `[23019]` - **instruction**: the given instruction **input**: optional input to accompany the instruction. Can be empty. - **output**: the "answer" to the instruction ## Dataset Creation The instructions, inputs and outputs were translated with OpenAI's API for `gpt-3.5-turbo`. `max_tokens=1024, temperature=0` as parameters. 
The prompt template to translate is (where `src_lang` is English and `tgt_lang` is Dutch): ```python TRANSLATION_PROMPT = """You are asked to translate a task's instruction, optional input to the task, and the output of the task, from {src_lang} into {tgt_lang}. Here are the requirements that you should adhere to: 1. maintain the format: the task consists of a task instruction (marked `instruction: `), optional input to the task (marked `input: `) and output for the task marked with `output: `; 2. do not translate the identifiers `instruction: `, `input: `, and `output: ` but instead copy them to your output; 3. make sure that text is fluent to read and does not contain grammatical errors. Use standard {tgt_lang} without regional bias; 4. translate the instruction and input text using informal, but standard, language; 5. make sure to avoid biases (such as gender bias, grammatical bias, social bias); 6. if the instruction is to correct grammar mistakes or spelling mistakes then you have to generate a similar mistake in the input in {tgt_lang}, and then also generate a corrected output version in the output in {tgt_lang}; 7. if the instruction is to translate text from one language to another, then you do not translate the text that needs to be translated in the instruction or the input, nor the translation in the output (just copy them as-is); 8. do not translate code fragments but copy them to your output. If there are English examples, variable names or definitions in code fragments, keep them in English. Now translate the following task with the requirements set out above. Do not provide an explanation and do not add anything else.\n\n""" ``` This prompt is concatenated with the instruction, optionally the input, and the output. 
In code, that last part looks like this: ```python text = f'instruction: "{instruction}"\n\n' if inputstr: text += f'input: "{inputstr}"\n\n' text += f'output: "{outputstr}"' ``` The system message was: ``` You are a helpful assistant that translates English to Dutch to the requirements that are given to you. ``` Note that 1 item (0.0001%) was not successfully translated. The translation was missing the input, instruction, or output keywords where those were expected. The ID for the missing item is `[23019]`. ### Source Data #### Initial Data Collection and Normalization Initial data creation by [Tatsu lab](https://huggingface.co/datasets/tatsu-lab/alpaca) and cleaned by [Yahma](https://huggingface.co/datasets/yahma/alpaca-cleaned). #### Who are the source language producers? The original dataset was generated with OpenAI's `text-davinci-003`. ## Considerations for Using the Data Note that the translations in this new dataset have not been verified by humans. ### Discussion of Biases As with any machine-generated texts, users should be aware of potential biases that are included in this dataset. Although the prompt specifically includes `make sure to avoid biases (such as gender bias, grammatical bias, social bias)`, of course the impact of such command is not known. It is likely that biases remain in the dataset so use with caution. ### Other Known Limitations The translation quality has not been verified. Use at your own risk! ### Licensing Information As per OpenAI's terms of use, this dataset cannot be used to build [a commercial system that competes with OpenAI's services](https://openai.com/policies/terms-of-use). Similar to the original Alpaca dataset, this dataset is released under CC NC 4.0. This text was generated (either in part or in full) with GPT-3 (`gpt-3.5-turbo`), OpenAI’s large-scale language-generation model. 
Upon generating draft language, the author reviewed, edited, and revised the language to their own liking and takes ultimate responsibility for the content of this publication. If you use this dataset, you must also follow the [Sharing](https://openai.com/policies/sharing-publication-policy) and [Usage](https://openai.com/policies/usage-policies) policies. As clearly stated in their [Terms of Use](https://openai.com/policies/terms-of-use), specifically 2c.iii, "[you may not] use output from the Services to develop models that compete with OpenAI". That means that you cannot use this dataset to build models that are intended to commercially compete with OpenAI. [As far as I am aware](https://law.stackexchange.com/questions/93308/licensing-material-generated-with-chatgpt), that is a specific restriction that should serve as an addendum to the current license. ### Citation Information If you use this data set, please cite : Vanroy, B. (2023). Alpaca Cleaned Dutch [Data set]. Hugging Face. https://doi.org/10.57967/HF/0530 ```bibtex @misc{https://doi.org/10.57967/hf/0530, doi = {10.57967/HF/0530}, url = {https://huggingface.co/datasets/BramVanroy/alpaca-cleaned-dutch}, author = {Vanroy, Bram}, title = {{A}lpaca {C}leaned {D}utch}, publisher = {Hugging Face}, year = {2023} } ``` ### Contributions Thanks to [Tatsu lab](https://huggingface.co/datasets/tatsu-lab/alpaca) for the initial machine-generated dataset and yahma for [cleaning it](https://huggingface.co/datasets/yahma/alpaca-cleaned).
[ -0.3570720851421356, -0.765691339969635, 0.06405934691429138, 0.3808674216270447, -0.34604570269584656, -0.5947088599205017, -0.37820467352867126, -0.5580022931098938, 0.38154253363609314, 0.5583643913269043, -0.6222975254058838, -0.5832767486572266, -0.6396692991256714, 0.3342284262180328...
null
null
null
null
null
null
null
null
null
null
null
null
null
tasksource/seahorse_summarization_evaluation
tasksource
2023-05-23T09:46:41Z
135
0
null
[ "task_categories:summarization", "language:de", "language:en", "language:ru", "language:tr", "language:vi", "license:cc", "arxiv:2305.13194", "region:us" ]
2023-05-23T09:46:41Z
2023-05-23T09:45:14.000Z
2023-05-23T09:45:14
--- license: cc task_categories: - summarization language: - de - en - ru - tr - vi --- ```bib @misc{clark2023seahorse, title={SEAHORSE: A Multilingual, Multifaceted Dataset for Summarization Evaluation}, author={Elizabeth Clark and Shruti Rijhwani and Sebastian Gehrmann and Joshua Maynez and Roee Aharoni and Vitaly Nikolaev and Thibault Sellam and Aditya Siddhant and Dipanjan Das and Ankur P. Parikh}, year={2023}, eprint={2305.13194}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ -0.32681941986083984, -0.027111999690532684, -0.03304386883974075, 0.2985578179359436, -0.4283202588558197, 0.04740283638238907, -0.06668609380722046, -0.637767493724823, 0.7248334884643555, 0.2803269028663635, -0.25251513719558716, -0.4591626226902008, -0.696029543876648, 0.48496204614639...
null
null
null
null
null
null
null
null
null
null
null
null
null
BuffetFS/BUFFET
BuffetFS
2023-08-13T06:26:30Z
135
4
null
[ "license:mit", "region:us" ]
2023-08-13T06:26:30Z
2023-05-24T00:53:16.000Z
2023-05-24T00:53:16
--- license: mit --- # BUFFET: Benchmarking Large Language Models for Cross-lingual Few-shot Transfer - Project page: [buffetfs.github.io/](https://buffetfs.github.io/) ([Paper](https://buffetfs.github.io/static/files/buffet_paper.pdf)) # Dataset Card for BEIR Benchmark ## Table of Contents - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) ## Dataset Description - **Homepage:** https://buffetfs.github.io/ - **Repository:** https://github.com/AkariAsai/BUFFET - **Paper:** https://buffetfs.github.io/static/files/buffet_paper.pdf - **Point of Contact:** akari@cs.washigton.edu ### Dataset Summary <b>BUFFET</b> unifies 15 diverse NLP datasets in typologically diverse 54 languages. The list of the datasets is available below. We are currently working on Dataset summary, and will update the descriptions shortly!
[ -0.4247821569442749, -0.3034086227416992, -0.1454085260629654, 0.549278736114502, -0.13142670691013336, -0.12146099656820297, -0.4563100039958954, -0.5231173038482666, 0.1342000961303711, 0.5756018161773682, -0.48874738812446594, -0.713029682636261, -0.48180970549583435, 0.2210048139095306...
null
null
null
null
null
null
null
null
null
null
null
null
null
shirsh10mall/LLM_Instruct_Learning_Project_Preprocessed_Tokenized_Open_Orca_Dataset_Flan_T5
shirsh10mall
2023-08-08T11:52:51Z
135
1
null
[ "region:us" ]
2023-08-08T11:52:51Z
2023-07-25T13:53:22.000Z
2023-07-25T13:53:22
--- dataset_info: features: - name: system_prompt dtype: string - name: question dtype: string - name: response dtype: string - name: input_ids sequence: int32 - name: attention_mask sequence: int8 - name: labels sequence: int64 - name: Inputs Token length dtype: int64 - name: Response Token length dtype: int64 splits: - name: train num_bytes: 1283943963.5926845 num_examples: 430318 - name: test num_bytes: 226579926.12734038 num_examples: 75939 download_size: 588711752 dataset_size: 1510523889.7200248 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* --- # Dataset Card for "temp_data_LLM_Project" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.41547074913978577, -0.15078316628932953, 0.2698219120502472, 0.11338067799806595, -0.332089900970459, 0.18869122862815857, 0.22957032918930054, 0.17406494915485382, 0.8843661546707153, 0.48562419414520264, -0.8084670305252075, -0.9812965393066406, -0.5219528675079346, -0.262641966342926...
null
null
null
null
null
null
null
null
null
null
null
null
null
ealvaradob/phishing-dataset
ealvaradob
2023-11-19T17:20:30Z
135
0
null
[ "task_categories:text-classification", "size_categories:10K<n<100K", "language:en", "license:mit", "region:us" ]
2023-11-19T17:20:30Z
2023-11-10T21:15:02.000Z
2023-11-10T21:15:02
--- license: mit task_categories: - text-classification language: - en size_categories: - 10K<n<100K ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/pile-curse-chunk-12
tomekkorbak
2022-03-18T22:06:33Z
134
0
null
[ "region:us" ]
2022-03-18T22:06:33Z
2022-03-18T22:05:44.000Z
2022-03-18T22:05:44
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-150000-200000
tomekkorbak
2022-10-06T02:24:38Z
134
0
null
[ "region:us" ]
2022-10-06T02:24:38Z
2022-10-03T19:42:50.000Z
2022-10-03T19:42:50
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622264862060547, 0.43461528420448303, -0.52829909324646, 0.7012971639633179, 0.7915720343589783, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104477167129517, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
medalpaca/medical_meadow_cord19
medalpaca
2023-04-06T16:47:03Z
134
4
null
[ "task_categories:summarization", "size_categories:100K<n<1M", "language:en", "region:us" ]
2023-04-06T16:47:03Z
2023-04-06T16:24:06.000Z
2023-04-06T16:24:06
--- task_categories: - summarization language: - en size_categories: - 100K<n<1M --- # CORD 19 ## Dataset Description - **Homepage:** https://www.kaggle.com/datasets/allen-institute-for-ai/CORD-19-research-challenge ### Dataset Summary In response to the COVID-19 pandemic, the White House and a coalition of leading research groups have prepared the COVID-19 Open Research Dataset (CORD-19). CORD-19 is a resource of over 1,000,000 scholarly articles, including over 400,000 with full text, about COVID-19, SARS-CoV-2, and related coronaviruses. This freely available dataset is provided to the global research community to apply recent advances in natural language processing and other AI techniques to generate new insights in support of the ongoing fight against this infectious disease. This is a processed version of the dataset, where we removed some empty entries and formated it to be compatible with the alpaca training. For more details on the data, please refer to the original publicatio. ### Citation Information ``` @inproceedings{wang-etal-2020-cord, title = "{CORD-19}: The {COVID-19} Open Research Dataset", author = "Wang, Lucy Lu and Lo, Kyle and Chandrasekhar, Yoganand and Reas, Russell and Yang, Jiangjiang and Burdick, Doug and Eide, Darrin and Funk, Kathryn and Katsis, Yannis and Kinney, Rodney Michael and Li, Yunyao and Liu, Ziyang and Merrill, William and Mooney, Paul and Murdick, Dewey A. and Rishi, Devvret and Sheehan, Jerry and Shen, Zhihong and Stilson, Brandon and Wade, Alex D. and Wang, Kuansan and Wang, Nancy Xin Ru and Wilhelm, Christopher and Xie, Boya and Raymond, Douglas M. and Weld, Daniel S. and Etzioni, Oren and Kohlmeier, Sebastian", booktitle = "Proceedings of the 1st Workshop on {NLP} for {COVID-19} at {ACL} 2020", month = jul, year = "2020", address = "Online", publisher = "Association for Computational Linguistics", url = "https://www.aclweb.org/anthology/2020.nlpcovid19-acl.1" } ```
[ -0.34510546922683716, -0.7911881804466248, -0.1845555156469345, 0.3009379208087921, -0.24598479270935059, 0.02808976173400879, -0.3402246832847595, -0.4931734502315521, 0.35185232758522034, 0.08108033984899521, -0.7002930045127869, -0.6068199276924133, -0.47085025906562805, 0.0801677107810...
null
null
null
null
null
null
null
null
null
null
null
null
null
rokmr/pets
rokmr
2023-07-25T19:20:21Z
134
5
null
[ "task_categories:image-classification", "size_categories:n<1K", "language:en", "license:mit", "region:us" ]
2023-07-25T19:20:21Z
2023-07-08T09:44:28.000Z
2023-07-08T09:44:28
--- license: mit task_categories: - image-classification language: - en size_categories: - n<1K splits: - name: train MB: 20.4 num_examples: 1050 - name: test MB: 3.29 num_examples: 150 download_size: 24 MB dataset_size: 1050 --- ### Dataset Summary Mini(24 MB) Classification dataset for mini projects. Cats, dogs and rabbit are included as pet in this dataset. ### Supported Tasks and Leaderboards image-classification: Based on a pet image, the goal of this task is to predict the type of pet (i.e., dog or cat or rabbit). ### Languages English ### Class Label Mappings: ```json { "cat": 0, "dog": 1, "rabbit": 2, } ``` ### Load Dataset ```python from datasets import load_dataset train_dataset = load_dataset("rokmr/pets", split="train") test_dataset = load_dataset("rokmr/pets", split="test") #To view label print(train_dataset[0]["label"]) #To view image train_dataset[0]["image"] ``` ![image](https://raw.githubusercontent.com/rokmr/Deep-Learning-In-Computer-Vision/main/datasets/pet_dataset.png) ### Citation Information ``` @ONLINE {mini_pets, author="Rohit Kumar", title="Pets dataset", month="July", year="2023", url="https://github.com/rokmr" } ```
[ -0.37672269344329834, -0.23439016938209534, -0.027376223355531693, 0.04527035728096962, -0.25598445534706116, 0.06964452564716339, -0.1855192929506302, -0.4801435172557831, 0.2449272722005844, 0.2579832673072815, -0.286176860332489, -0.6649641990661621, -0.6392369270324707, 0.1071505546569...
null
null
null
null
null
null
null
null
null
null
null
null
null
verbrannter/invoice_dataset_large_cleaned_2
verbrannter
2023-07-16T14:59:01Z
134
2
null
[ "region:us" ]
2023-07-16T14:59:01Z
2023-07-16T12:56:56.000Z
2023-07-16T12:56:56
--- dataset_info: features: - name: image dtype: image - name: ground_truth dtype: string splits: - name: train num_bytes: 7090191899.096405 num_examples: 25434 - name: test num_bytes: 1772687358.9035952 num_examples: 6359 download_size: 1645061199 dataset_size: 8862879258.0 --- # Dataset Card for "invoice_dataset_large_cleaned_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.19973009824752808, -0.07624287903308868, 0.06480775028467178, 0.13921163976192474, -0.3799703121185303, -0.08901014178991318, 0.17437148094177246, -0.2241172045469284, 0.6340630650520325, 0.947910487651825, -0.558483362197876, -0.4765462577342987, -0.41591107845306396, -0.37644135951995...
null
null
null
null
null
null
null
null
null
null
null
null
null
FinGPT/fingpt-ner
FinGPT
2023-10-10T06:33:43Z
134
0
null
[ "region:us" ]
2023-10-10T06:33:43Z
2023-10-10T06:33:18.000Z
2023-10-10T06:33:18
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: input dtype: string - name: output dtype: string - name: instruction dtype: string splits: - name: train num_bytes: 241523 num_examples: 511 - name: test num_bytes: 63634 num_examples: 98 download_size: 105426 dataset_size: 305157 --- # Dataset Card for "fingpt-ner" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.8072187900543213, -0.3917463421821594, 0.06889092922210693, 0.0668393224477768, -0.29218611121177673, -0.04510948434472084, 0.2893367409706116, -0.17517955601215363, 0.7754958868026733, 0.5575497150421143, -0.7945224642753601, -0.6700302958488464, -0.6442440152168274, -0.226651653647422...
null
null
null
null
null
null
null
null
null
null
null
null
null
knowrohit07/know_medical_dialogue_v2
knowrohit07
2023-10-27T14:44:51Z
134
25
null
[ "license:openrail", "region:us" ]
2023-10-27T14:44:51Z
2023-10-12T12:22:27.000Z
2023-10-12T12:22:27
--- license: openrail --- ### Description: The knowrohit07/know_medical_dialogues_v2 dataset is a collection of conversational exchanges between patients and doctors on various medical topics. It aims to capture the intricacies, uncertainties, and questions posed by individuals regarding their health and the medical guidance provided in response. ### 🎯 Intended Use: This dataset is crafted for training Large Language Models (LLMs) with a focus on understanding and generating medically-informed dialogue. It's ideal for LLM applications aiming to provide medical information or insights, especially for scenarios with limited access to healthcare resources. ❗ Limitations: While this dataset includes diverse interactions, it doesn't cover every medical scenario. Models trained on this data should be viewed as an additional resource, not a substitute for professional medical consultation. 📌 Data Source: Conversational seed tasks or exchanges were collected from anonymized patient-doctor interactions and synthetically made using GPT4. 📋 Collection Methodology: The data was meticulously curated to ensure no personally identifiable information remained. All conversations are representative of general concerns and advice, without specific case details. ### Advantages of the Dataset: Broad Spectrum: The dataset encompasses a wide array of medical queries and advice, making it valuable for general medical conversational AI. Diverse Interactions: It captures everything from symptom queries to post-care instructions. Training Potential for LLMs: Specifically tailored for fine-tuning LLMs for medical conversations, enhancing the resultant model's capability in this domain. ⚖️ Ethical and Impact Considerations: Positive Impact: Utilizing LLMs trained on this dataset can be invaluable for healthcare professionals, especially in regions with limited medical datasets. 
When deployed on affordable local devices, doctors can leverage an AI-assisted tool, enhancing their consultation and decision-making processes. Potential Risks: There's an inherent risk of the model providing guidance that may not match the latest medical guidelines or specific patient requirements. It's crucial to clarify to users that outputs from the LLM should complement professional medical opinions. Recommendation: Encourage healthcare professionals to use this tool as an initial point of reference and not as the primary foundation for medical decisions.
[ -0.011119849048554897, -0.4081454277038574, 0.7024998068809509, -0.0918867215514183, -0.3188649117946625, -0.15665379166603088, -0.09711775183677673, -0.6535590291023254, 0.18567249178886414, 0.778520941734314, -0.7682592868804932, -0.6483136415481567, -0.5971935987472534, -0.1909811347723...
null
null
null
null
null
null
null
null
null
null
null
null
null
yairschiff/translation
yairschiff
2023-11-21T16:18:02Z
134
0
null
[ "region:us" ]
2023-11-21T16:18:02Z
2023-11-21T16:17:58.000Z
2023-11-21T16:17:58
--- dataset_info: features: - name: Source dtype: string - name: Target dtype: string - name: Prediction dtype: string splits: - name: train num_bytes: 70948939 num_examples: 232825 - name: validation num_bytes: 296587 num_examples: 890 - name: test num_bytes: 2543037 num_examples: 8597 download_size: 45184309 dataset_size: 73788563 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
dominguesm/alpaca-data-pt-br
dominguesm
2023-11-17T08:51:52Z
133
11
null
[ "task_categories:text-generation", "language:pt", "license:cc-by-nc-4.0", "instruction-finetuning", "region:us" ]
2023-11-17T08:51:52Z
2023-03-17T22:48:45.000Z
2023-03-17T22:48:45
--- dataset_info: features: - name: instruction dtype: string - name: input dtype: string - name: output dtype: string splits: - name: train num_bytes: 18324606 num_examples: 51759 download_size: 11508709 dataset_size: 18324606 license: cc-by-nc-4.0 language: - pt tags: - instruction-finetuning pretty_name: Alpaca-Cleaned-PTBR task_categories: - text-generation --- **NOTE:** This is a machine translated version of the [yahma/alpaca-cleaned](https://huggingface.co/datasets/yahma/alpaca-cleaned) dataset. # Dataset Card for Alpaca-Cleaned - **Repository:** https://github.com/gururise/AlpacaDataCleaned ## Dataset Description This is a cleaned version of the original Alpaca Dataset released by Stanford. The following issues have been identified in the original release and fixed in this dataset: 1. **Hallucinations:** Many instructions in the original dataset had instructions referencing data on the internet, which just caused GPT3 to hallucinate an answer. ``` "instruction":"Summarize the given article in 200 Words.", "input": "https://www.bbc.com/news/world-51461830", "output": "The recent protests in Belarus are part of a growing wave of discontent against the rule of Alexander Lukashenko, who has led the country since 1994. Demonstrations against his government have been ongoing since August, and come in the wake of an election widely seen as rigged. As police and security forces have cracked down on protesters, the movement has only grown, with hundreds of thousands of people taking to the streets to call for new leadership. Human rights activists have accused the government of using extreme violence in order prosecute protesters, leading to an international outcry and sanctions from the European Union. As the situation in Belarus continues to unfold, the international community is monitoring the situation closely - hoping for a peaceful resolution, while also standing firm against the oppressive tactics employed by the Belarusian state." 
"instruction": "Classify the following shape according to simple geometry.", "input": "<Picture Attached>", "output": "Triangle" "instruction": "Given a recent news article, write a headline for it.", "input": "https://www.news.example.com/article-1", "output": "\"NASA Launches a Mars 2020 Mission to Search for Signs of Ancient Life\"" ``` 2. **Merged Instructions:** There were many instructions that were merged together in the original dataset for some reason. ``` "instruction": "Should I invest in stocks?", "input": "", "output": "It depends on your individual situation. Investing in stocks carries a certain amount of risk but can also offer potential rewards. Make sure to research the different types of stocks, understand the risks and rewards, and develop a plan that works for you.\n##\n18. Instruction: Identify the noun in the following sentence:\n18. Input:\nThe large apple was delicious.\n18. Output:\nApple" ``` 3. **Empty outputs:** Some entries in the original dataset had empty outputs. ``` "instruction": "Calculate the area of the following shape in square centimeters.", "input": "rectangle of size 4 cm x 5 cm", "output": "" ``` 4. **Empty code examples:** Some descriptions in the original dataset were missing code examples, making it difficult to understand the intended behavior of the code. 5. **Instructions to generate images:** Some descriptions in the original dataset included instructions to generate images, something obviously not possible. ``` "instruction": "Create a graphic or logo that visually represents the word \"courage\".", "input": "", "output": "<No Output>" ``` 6. **N/A outputs:** Some code snippets in the original dataset had N/A outputs. 7. **Inconsistent input field:** The original dataset had inconsistent usage of the input field when it was supposed to be empty. ``` "input":"<no input>" "input":"No input" "input":"noinput" "input":"<noinput>" ``` 8. 
**Wrong answers:** Some instructions/questions in the original dataset had incorrect answers. About 80% of the math problems are estimated to have incorrect answers. ``` "instruction": "Calculate the median of the following data set.", "input": "1, 2, 4, 5, 8, 9", "output": "5" "instruction": "Convert 25m to km.", "input": "", "output": "25km" ``` 9. **Non-Sensical/Unclear instructions:** Many instructions are unclear, we try to clarify (or re-write) if instructions are non-sensical. Instructions that are slightly unclear, but where one could deduce the meaning are not altered. ``` "instruction": "Freeze the following sample of yogurt for 10 minutes.", "input": "Yogurt sample", "output": "<noinput>" "instruction": "Increase the font size to 12 points.", "input": "", "output": "The font size has been increased to 12 points." ``` 10. **Extraneous escape and control characters:** The original dataset had several entries with extraneous escape and control characters. ### Original Alpaca Dataset Summary Alpaca is a dataset of 52,000 instructions and demonstrations generated by OpenAI's `text-davinci-003` engine. This instruction data can be used to conduct instruction-tuning for language models and make the language model follow instruction better. The authors built on the data generation pipeline from [Self-Instruct framework](https://github.com/yizhongw/self-instruct) and made the following modifications: - The `text-davinci-003` engine to generate the instruction data instead of `davinci`. - A [new prompt](https://github.com/tatsu-lab/stanford_alpaca/blob/main/prompt.txt) was written that explicitly gave the requirement of instruction generation to `text-davinci-003`. - Much more aggressive batch decoding was used, i.e., generating 20 instructions at once, which significantly reduced the cost of data generation. - The data generation pipeline was simplified by discarding the difference between classification and non-classification instructions. 
- Only a single instance was generated for each instruction, instead of 2 to 3 instances as in Self-Instruct. This produced an instruction-following dataset with 52K examples obtained at a much lower cost (less than $500). In a preliminary study, the authors also found that the 52K generated data to be much more diverse than the data released by [Self-Instruct](https://github.com/yizhongw/self-instruct/blob/main/data/seed_tasks.jsonl). ### Supported Tasks and Leaderboards The Alpaca dataset designed for instruction training pretrained language models. ### Languages The data in Alpaca are in English (BCP-47 en). ## Dataset Structure ### Data Instances An example of "train" looks as follows: ```json { "instruction": "Create a classification task by clustering the given list of items.", "input": "Apples, oranges, bananas, strawberries, pineapples", "output": "Class 1: Apples, Oranges\nClass 2: Bananas, Strawberries\nClass 3: Pineapples", "text": "Below is an instruction that describes a task, paired with an input that provides further context. Write a response that appropriately completes the request.\n\n### Instruction:\nCreate a classification task by clustering the given list of items.\n\n### Input:\nApples, oranges, bananas, strawberries, pineapples\n\n### Response:\nClass 1: Apples, Oranges\nClass 2: Bananas, Strawberries\nClass 3: Pineapples", } ``` ### Data Fields The data fields are as follows: * `instruction`: describes the task the model should perform. Each of the 52K instructions is unique. * `input`: optional context or input for the task. For example, when the instruction is "Summarize the following article", the input is the article. Around 40% of the examples have an input. * `output`: the answer to the instruction as generated by `text-davinci-003`. * `text`: the `instruction`, `input` and `output` formatted with the [prompt template](https://github.com/tatsu-lab/stanford_alpaca#data-release) used by the authors for fine-tuning their models. 
### Data Splits | | train | |---------------|------:| | alpaca | 52002 | ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset Excerpt the [blog post](https://crfm.stanford.edu/2023/03/13/alpaca.html) accompanying the release of this dataset: > We believe that releasing the above assets will enable the academic community to perform controlled scientific studies on instruction-following language models, resulting in better science and ultimately new techniques to address the existing deficiencies with these models. At the same time, any release carries some risk. First, we recognize that releasing our training recipe reveals the feasibility of certain capabilities. On one hand, this enables more people (including bad actors) to create models that could cause harm (either intentionally or not). On the other hand, this awareness might incentivize swift defensive action, especially from the academic community, now empowered by the means to perform deeper safety research on such models. Overall, we believe that the benefits for the research community outweigh the risks of this particular release. Given that we are releasing the training recipe, we believe that releasing the data, model weights, and training code incur minimal further risk, given the simplicity of the recipe. At the same time, releasing these assets has enormous benefits for reproducible science, so that the academic community can use standard datasets, models, and code to perform controlled comparisons and to explore extensions. 
Deploying an interactive demo for Alpaca also poses potential risks, such as more widely disseminating harmful content and lowering the barrier for spam, fraud, or disinformation. We have put into place two risk mitigation strategies. First, we have implemented a content filter using OpenAI’s content moderation API, which filters out harmful content as defined by OpenAI’s usage policies. Second, we watermark all the model outputs using the method described in Kirchenbauer et al. 2023, so that others can detect (with some probability) whether an output comes from Alpaca 7B. Finally, we have strict terms and conditions for using the demo; it is restricted to non-commercial uses and to uses that follow LLaMA’s license agreement. We understand that these mitigation measures can be circumvented once we release the model weights or if users train their own instruction-following models. However, by installing these mitigations, we hope to advance the best practices and ultimately develop community norms for the responsible deployment of foundation models. ### Discussion of Biases [More Information Needed] ### Other Known Limitations The `alpaca` data is generated by a language model (`text-davinci-003`) and inevitably contains some errors or biases. We encourage users to use this data with caution and propose new methods to filter or improve the imperfections. ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information The dataset is available under the [Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)](https://creativecommons.org/licenses/by-nc/4.0/). ### Citation Information ``` @misc{alpaca, author = {Rohan Taori and Ishaan Gulrajani and Tianyi Zhang and Yann Dubois and Xuechen Li and Carlos Guestrin and Percy Liang and Tatsunori B. 
Hashimoto }, title = {Stanford Alpaca: An Instruction-following LLaMA model}, year = {2023}, publisher = {GitHub}, journal = {GitHub repository}, howpublished = {\url{https://github.com/tatsu-lab/stanford_alpaca}}, } ``` ### Contributions [More Information Needed]
[ -0.4380626976490021, -1.0425522327423096, 0.23150773346424103, -0.026518264785408974, -0.18014992773532867, -0.32005947828292847, 0.007864782586693764, -0.3185918629169464, 0.24983449280261993, 0.6073174476623535, -0.8881841897964478, -0.5723432898521423, -0.6502535343170166, -0.0050911880...
null
null
null
null
null
null
null
null
null
null
null
null
null
nRuaif/Vietnamese_x_Alpaca
nRuaif
2023-04-21T05:32:45Z
133
6
null
[ "license:mit", "region:us" ]
2023-04-21T05:32:45Z
2023-04-21T05:27:15.000Z
2023-04-21T05:27:15
--- license: mit ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
bugdaryan/sql-create-context-instruction
bugdaryan
2023-09-07T15:22:00Z
133
5
null
[ "task_categories:text-generation", "task_categories:question-answering", "task_categories:table-question-answering", "size_categories:10K<n<100K", "language:en", "license:cc-by-4.0", "SQL", "code", "NLP", "text-to-sql", "context-sql", "spider", "wikisql", "sqlglot", "region:us" ]
2023-09-07T15:22:00Z
2023-09-06T14:06:13.000Z
2023-09-06T14:06:13
--- license: cc-by-4.0 task_categories: - text-generation - question-answering - table-question-answering language: - en tags: - SQL - code - NLP - text-to-sql - context-sql - spider - wikisql - sqlglot pretty_name: sql-create-context size_categories: - 10K<n<100K --- ## Overview This dataset is built upon [SQL Create Context](https://huggingface.co/datasets/b-mc2/sql-create-context), which in turn was constructed using data from [WikiSQL](https://huggingface.co/datasets/wikisql) and [Spider](https://huggingface.co/datasets/spider). There are 78,577 examples of natural language queries, SQL CREATE TABLE statements, and SQL Query answering the question using the CREATE statement as context. This dataset was built with text-to-SQL LLMs in mind, intending to prevent hallucination of column and table names often seen when trained on text-to-SQL datasets. The CREATE TABLE statement can often be copy and pasted from different DBMS and provides table names, column names, and their data types. By providing just the CREATE TABLE statement as context, we can hopefully provide better grounding for models without having to provide actual rows of data, limiting token usage and exposure to private, sensitive, or proprietary data. ## Cleansing and Augmentation Cleansing and data augmentation have been done on the combined WikiSQL and Spider data. [SQLGlot](https://github.com/tobymao/sqlglot) was used on queries from Spider and WikiSQL to parse them into different tables and columns. Column data types were inferred based on the usage of `>` `<` operators as well as the use of `MIN()`, `MAX()`, `AVG()`, and `SUM()` on columns. While this isn't perfect, it increases the likelihood of inferring the correct datatype for a column; otherwise, columns default to VARCHAR type. These tables and columns are then used to generate CREATE TABLE statements using the inferred types. SQLGlot is used again to ensure both the SQL queries and CREATE TABLE statements parse without errors. 
Some queries that do not have column names, e.g., SELECT * FROM table, have a default Id column added to the CREATE TABLE statement. Some other queries which use the generic `table` as the FROM table have instead been changed to a variation of `table_name_1` or some other number which is also reflected in the CREATE TABLE statement. ## TODO - Further augment the data by converting queries and CREATE TABLE statements into different SQL dialects. This can be done with SQLGlot. Reference to the dialect might also be added to the question. **Random sample:** ```json { "text" : "[INST] Write SQLite query to answer the following question given the database schema. Please wrap your code answer using ```: Schema: CREATE TABLE head (age INTEGER) Question: How many heads of the departments are older than 56 ? [/INST] Here is the SQLite query to answer to the question: How many heads of the departments are older than 56 ?: ``` SELECT COUNT(*) FROM head WHERE age > 56 ```", "text" : "[INST] Write SQLite query to answer the following question given the database schema. Please wrap your code answer using ```: Schema: CREATE TABLE head (name VARCHAR, born_state VARCHAR, age VARCHAR) Question: List the name, born state and age of the heads of departments ordered by age. [/INST] Here is the SQLite query to answer to the question: List the name, born state and age of the heads of departments ordered by age.: ``` SELECT name, born_state, age FROM head ORDER BY age ```" } ``` The dataset was used to create code-llama-2 style prompts. The basic prompt template is: ``` [INST] Instruction/context [/INST] Model output ```
[ -0.29521623253822327, -0.908272385597229, 0.5218158960342407, 0.028994234278798103, -0.2459906041622162, 0.08020399510860443, 0.09479298442602158, -0.19569902122020721, 0.33943256735801697, 0.894253671169281, -0.6587485074996948, -0.631115734577179, -0.06996326893568039, 0.3269721865653991...
null
null
null
null
null
null
null
null
null
null
null
null
null
manu/test_fboolq
manu
2023-11-14T08:58:37Z
133
0
null
[ "region:us" ]
2023-11-14T08:58:37Z
2023-09-26T08:30:49.000Z
2023-09-26T08:30:49
--- dataset_info: features: - name: question dtype: string - name: passage dtype: string - name: label dtype: int64 splits: - name: test num_bytes: 153880 num_examples: 178 - name: valid num_bytes: 7038 num_examples: 10 download_size: 64042 dataset_size: 160918 configs: - config_name: default data_files: - split: test path: data/test-* - split: valid path: data/valid-* --- # Dataset Card for "test_fboolq" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.60569167137146, -0.5337614417076111, -0.09833135455846786, 0.2814123332500458, -0.1274351328611374, 0.12568016350269318, 0.31711336970329285, -0.10845103859901428, 0.6316388249397278, 0.4499713182449341, -0.756030261516571, -0.7594006061553955, -0.4119507968425751, -0.2250782698392868, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
intone/horror_stories_reddit
intone
2023-10-24T16:16:22Z
133
1
null
[ "task_categories:text-generation", "task_categories:translation", "size_categories:1K<n<10K", "language:en", "region:us" ]
2023-10-24T16:16:22Z
2023-10-24T12:00:59.000Z
2023-10-24T12:00:59
--- task_categories: - text-generation - translation language: - en size_categories: - 1K<n<10K --- # HSR <br> HSR is a compilation of 5605 reddit posts scraped from the following subreddits: - r/ScaryStories - r/LetsNotMeet - r/TwoSentenceHorror - r/freehorrorstories - r/TrueScaryStories - r/NoSleep - r/Ruleshorror # HSR Credits If you are using HSR, you must cite us for your project. This dataset can be used for Translation, Generative or Conversational models. <br> Here are a few ideas that you can use HSR for: <br> - Title-to-story - Text Generation - Spooky chats
[ -0.33681559562683105, -0.9224668741226196, 0.29555627703666687, 0.15887700021266937, -0.5297845602035522, 0.012714090757071972, 0.358761727809906, -0.3031664788722992, 0.5794948935508728, 0.6778436303138733, -0.5655860304832458, -0.435628205537796, -0.40828195214271545, 0.35149845480918884...
null
null
null
null
null
null
null
null
null
null
null
null
null
TerminatorJ/localization_multiRNA
TerminatorJ
2023-11-20T22:37:14Z
133
0
null
[ "license:mit", "region:us" ]
2023-11-20T22:37:14Z
2023-11-20T20:38:27.000Z
2023-11-20T20:38:27
--- license: mit configs: - config_name: default data_files: - split: train path: "Train_fold0.csv" - split: test path: "Test_fold0.csv" - split: validation path: "Val_fold0.csv" ---
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
null
null
null
null
null
null
null
null
null
null
null
null
null
ahmedlone123/sv_corpora_parliament_processed
ahmedlone123
2022-05-15T11:26:20Z
132
0
null
[ "region:us" ]
2022-05-15T11:26:20Z
2022-05-15T11:25:58.000Z
2022-05-15T11:25:58
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-200000-250000
tomekkorbak
2022-10-06T01:24:15Z
132
0
null
[ "region:us" ]
2022-10-06T01:24:15Z
2022-10-03T19:43:14.000Z
2022-10-03T19:43:14
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
liyucheng/zhihu_rlhf_3k
liyucheng
2023-04-15T17:06:05Z
132
47
null
[ "license:cc-by-2.0", "region:us" ]
2023-04-15T17:06:05Z
2023-04-15T17:03:54.000Z
2023-04-15T17:03:54
--- license: cc-by-2.0 ---
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
null
null
null
null
null
null
null
null
null
null
null
null
null
Fsoft-AIC/the-vault-function
Fsoft-AIC
2023-07-04T02:33:36Z
132
9
null
[ "task_categories:text-generation", "multilinguality:multiprogramming languages", "language:code", "language:en", "license:mit", "arxiv:2305.06156", "region:us" ]
2023-07-04T02:33:36Z
2023-05-05T14:25:47.000Z
2023-05-05T14:25:47
--- language: - code - en multilinguality: - multiprogramming languages task_categories: - text-generation license: mit dataset_info: features: - name: identifier dtype: string - name: return_type dtype: string - name: repo dtype: string - name: path dtype: string - name: language dtype: string - name: code dtype: string - name: code_tokens dtype: string - name: original_docstring dtype: string - name: comment dtype: string - name: docstring_tokens dtype: string - name: docstring dtype: string - name: original_string dtype: string pretty_name: The Vault Function viewer: true --- ## Table of Contents - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks](#supported-tasks) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Statistics](#dataset-statistics) - [Usage](#usage) - [Additional Information](#additional-information) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Repository:** [FSoft-AI4Code/TheVault](https://github.com/FSoft-AI4Code/TheVault) - **Paper:** [The Vault: A Comprehensive Multilingual Dataset for Advancing Code Understanding and Generation](https://arxiv.org/abs/2305.06156) - **Contact:** support.ailab@fpt.com - **Website:** https://www.fpt-aicenter.com/ai-residency/ <p align="center"> <img src="https://raw.githubusercontent.com/FSoft-AI4Code/TheVault/main/assets/the-vault-4-logo-png.png" width="300px" alt="logo"> </p> <div align="center"> # The Vault: A Comprehensive Multilingual Dataset for Advancing Code Understanding and Generation </div> ## Dataset Summary The Vault dataset is a comprehensive, large-scale, multilingual parallel dataset that features high-quality code-text pairs derived from The Stack, the largest permissively-licensed source code dataset. 
We provide The Vault which contains code snippets from 10 popular programming languages such as Java, JavaScript, Python, Ruby, Rust, Golang, C#, C++, C, and PHP. This dataset provides multiple code-snippet levels, metadata, and 11 docstring styles for enhanced usability and versatility. ## Supported Tasks The Vault can be used for pretraining LLMs or downstream code-text interaction tasks. A number of tasks related to code understanding and geneartion can be constructed using The Vault such as *code summarization*, *text-to-code generation* and *code search*. ## Languages The natural language text (docstring) is in English. 10 programming languages are supported in The Vault: `Python`, `Java`, `JavaScript`, `PHP`, `C`, `C#`, `C++`, `Go`, `Ruby`, `Rust` ## Dataset Structure ### Data Instances ``` { "hexsha": "5c47f0b4c173a8fd03e4e633d9b3dd8211e67ad0", "repo": "neumanna94/beepboop", "path": "js/scripts.js", "license": [ "MIT" ], "language": "JavaScript", "identifier": "beepBoopSelector", "return_type": "<not_specific>", "original_string": "function beepBoopSelector(inputString, bbFunction){\n if(bbFunction==1){\n return beepBoop(inputString);\n } else if(bbFunction==2){\n return beepBoop2(inputString);\n } else if(bbFunction==3){\n return beepBoop3(inputString);\n } else {\n }\n}", "original_docstring": "//Determines what beepBoop function to use", "docstring": "Determines what beepBoop function to use", "docstring_tokens": [ "Determines", "what", "beepBoop", "function", "to", "use" ], "code": "function beepBoopSelector(inputString, bbFunction){\n if(bbFunction==1){\n return beepBoop(inputString);\n } else if(bbFunction==2){\n return beepBoop2(inputString);\n } else if(bbFunction==3){\n return beepBoop3(inputString);\n } else {\n }\n}", "code_tokens": [ "function", "beepBoopSelector", "(", "inputString", ",", "bbFunction", ")", "{", "if", "(", "bbFunction", "==", "1", ")", "{", "return", "beepBoop", "(", "inputString", ")", ";", "}", "else", "if", "(", "bbFunction", 
"==", "2", ")", "{", "return", "beepBoop2", "(", "inputString", ")", ";", "}", "else", "if", "(", "bbFunction", "==", "3", ")", "{", "return", "beepBoop3", "(", "inputString", ")", ";", "}", "else", "{", "}", "}" ], "short_docstring": "Determines what beepBoop function to use", "short_docstring_tokens": [ "Determines", "what", "beepBoop", "function", "to", "use" ], "comment": [], "parameters": [ { "param": "inputString", "type": null }, { "param": "bbFunction", "type": null } ], "docstring_params": { "returns": [], "raises": [], "params": [ { "identifier": "inputString", "type": null, "docstring": null, "docstring_tokens": [], "default": null, "is_optional": null }, { "identifier": "bbFunction", "type": null, "docstring": null, "docstring_tokens": [], "default": null, "is_optional": null } ], "outlier_params": [], "others": [] } } ``` ### Data Fields Data fields for function level: - **hexsha** (string): the unique git hash of file - **repo** (string): the owner/repo - **path** (string): the full path to the original file - **license** (list): licenses in the repo - **language** (string): the programming language - **identifier** (string): the function or method name - **return_type** (string): the type returned by the function - **original_string** (string): original version of function/class node - **original_docstring** (string): the raw string before tokenization or parsing - **code** (string): the part of the original that is code - **code_tokens** (list): tokenized version of `code` - **short_docstring** (string): short, brief summarization (first line of the docstring) - **short_docstring_tokens** (list): tokenized version of `short_docstring - **docstring** (string): the top-level comment or docstring (docstring version without param’s doc, return, exception fields, etc) - **docstring_tokens** (list): tokenized version of docstring - **comment** (list): list of comments (line) inside the function/class - **parameters** (list): List of parameters and its 
type (type can be None) - **docstring_params** (dict): Dictionary of the parsed information from docstring See [here](https://github.com/FSoft-AI4Code/TheVault/blob/main/data/README.md) for more details and examples. ### Data Splits In this repo, The Vault is divided into 5 subsets, where three training versions are split based on size of the full training set, and the remains are validation set and test set (approximate 20,000 samples in each). The statistic for languages in each split set is illustrated in the following section. Before split, the dataset is deduplicated. There are 3 versions of training set that are small (5%), medium (20%) and large (100%). ## Dataset Statistics - Compare to other benchmarks | Dataset | #Language | #Code-text pair | |:--------------------------|----------:|-----------------:| | PyMT5 | 1 | ≈ 7,700,000 | | CoDesc | 1 | 4,211,516 | | CodeSearchNet | 6 | 2,326,976 | | CodeSearchNet (CodeXGLUE) | 6 | 1,005,474 | | Deepcom | 1 | 424,028 | | CONCODE | 1 | 2,184,310 | | Funcom | 1 | 2,149,121 | | CodeT5 | 8 | 3,158,313 | | **The Vault** | **10** | **34,098,775** | - Statistic for split sets | | train/small | train/medium | train/full | validation | test | total | |:-----------|------------:|-------------:|-----------:|-----------:|-------:|--------------:| |Python | 370,657 | 1,952,110 | 7,772,647 | 30,992 | 21,652 | 7,825,291 | |Java | 351,213 | 1,612,366 | 6,629,193 | 22,677 | 15,552 | 6,667,422 | |JavaScript | 82,931 | 404,729 | 1,640,416 | 22,044 | 21,108 | 1,683,568 | |PHP | 236,638 | 1,155,476 | 4,656,371 | 21,375 | 19,010 | 4,696,756 | |C | 105,978 | 381,207 | 1,639,319 | 27,525 | 19,122 | 1,685,966 | |C# | 141,090 | 783,166 | 3,305,891 | 24,787 | 19,638 | 3,350,316 | |C++ | 87,420 | 410,907 | 1,671,268 | 20,011 | 18,169 | 1,709,448 | |Go | 267,535 | 1,319,547 | 5,109,020 | 19,102 | 25,314 | 5,153,436 | |Ruby | 23,921 | 112,574 | 424,339 | 17,338 | 19,908 | 461,585 | |Rust | 35,367 | 224,015 | 825,130 | 16,716 | 23,141 | 864,987 
| |TOTAL | 1,702,750 | 8,356,097 |33,673,594 |222,567 |202,614 |**34,098,775** | ## Usage You can load The Vault dataset using datasets library: ```pip install datasets``` ```python from datasets import load_dataset # Load full function level dataset (34M samples) dataset = load_dataset("Fsoft-AIC/the-vault-function") # Load function level train/validation/test set dataset = load_dataset("Fsoft-AIC/the-vault-function", split_set=["train"]) # Load "small" (or "medium", "full") version of function level training set dataset = load_dataset("Fsoft-AIC/the-vault-function", split_set=["train/small"]) # specific language (e.g. Python) dataset = load_dataset("Fsoft-AIC/the-vault-function", split_set=["train"], languages=['Python']) # dataset streaming data = load_dataset("Fsoft-AIC/the-vault-function", split_set= ["train"], streaming= True) for sample in iter(data['train']): print(sample) ``` A back up dataset can be downloaded in azure storage. See [Download The Vault from Azure blob storage](https://github.com/FSoft-AI4Code/TheVault#download-via-link). ## Additional information ### Licensing Information MIT License ### Citation Information ``` @article{manh2023vault, title={The Vault: A Comprehensive Multilingual Dataset for Advancing Code Understanding and Generation}, author={Manh, Dung Nguyen and Hai, Nam Le and Dau, Anh TV and Nguyen, Anh Minh and Nghiem, Khanh and Guo, Jin and Bui, Nghi DQ}, journal={arXiv preprint arXiv:2305.06156}, year={2023} } ``` ### Contributions This dataset is developed by [FSOFT AI4Code team](https://github.com/FSoft-AI4Code).
[ -0.3331475257873535, -0.38069021701812744, 0.2592766284942627, 0.2689876854419708, -0.03977419063448906, 0.3123897612094879, -0.1073523610830307, -0.32772496342658997, 0.17762960493564606, 0.2865162193775177, -0.6447073221206665, -1.0547635555267334, -0.4168764352798462, 0.3191742300987243...
null
null
null
null
null
null
null
null
null
null
null
null
null
nlpai-lab/openassistant-guanaco-ko
nlpai-lab
2023-06-01T10:44:35Z
132
4
null
[ "task_categories:text-generation", "task_categories:question-answering", "task_categories:summarization", "size_categories:1K<n<10K", "language:ko", "license:apache-2.0", "region:us" ]
2023-06-01T10:44:35Z
2023-06-01T06:54:34.000Z
2023-06-01T06:54:34
--- license: apache-2.0 task_categories: - text-generation - question-answering - summarization language: - ko size_categories: - 1K<n<10K --- ### Dataset Summary Korean translation of Guanaco via the DeepL API Note: There are cases where multilingual data has been converted to monolingual data during batch translation to Korean using the API. Below is Guanaco's README. ---- This dataset is a subset of the Open Assistant dataset, which you can find here: https://huggingface.co/datasets/OpenAssistant/oasst1/tree/main This subset of the data only contains the highest-rated paths in the conversation tree, with a total of 9,846 samples. This dataset was used to train Guanaco with QLoRA. For further information, please see the original dataset. License: Apache 2.0
[ -0.09971699863672256, -0.5356552600860596, 0.4159107804298401, 0.32636454701423645, -0.22031919658184052, 0.05748819559812546, -0.3623049557209015, -0.5907943248748779, 0.17823995649814606, 0.6364795565605164, -0.8608821630477905, -1.0669825077056885, -0.48949646949768066, -0.0282803159207...
null
null
null
null
null
null
null
null
null
null
null
null
null
dmayhem93/agieval-lsat-ar
dmayhem93
2023-06-18T17:25:42Z
132
1
null
[ "arxiv:2304.06364", "arxiv:2104.06598", "region:us" ]
2023-06-18T17:25:42Z
2023-06-18T12:50:26.000Z
2023-06-18T12:50:26
--- dataset_info: features: - name: query dtype: string - name: choices sequence: string - name: gold sequence: int64 splits: - name: test num_bytes: 273902 num_examples: 230 download_size: 66495 dataset_size: 273902 --- # Dataset Card for "agieval-lsat-ar" Dataset taken from https://github.com/microsoft/AGIEval and processed as in that repo. Raw datset: https://github.com/zhongwanjun/AR-LSAT MIT License Copyright (c) 2022 Wanjun Zhong Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
@misc{zhong2023agieval, title={AGIEval: A Human-Centric Benchmark for Evaluating Foundation Models}, author={Wanjun Zhong and Ruixiang Cui and Yiduo Guo and Yaobo Liang and Shuai Lu and Yanlin Wang and Amin Saied and Weizhu Chen and Nan Duan}, year={2023}, eprint={2304.06364}, archivePrefix={arXiv}, primaryClass={cs.CL} } @misc{zhong2021arlsat, title={AR-LSAT: Investigating Analytical Reasoning of Text}, author={Wanjun Zhong and Siyuan Wang and Duyu Tang and Zenan Xu and Daya Guo and Jiahai Wang and Jian Yin and Ming Zhou and Nan Duan}, year={2021}, eprint={2104.06598}, archivePrefix={arXiv}, primaryClass={cs.CL} } @article{wang2022lsat, title={From lsat: The progress and challenges of complex reasoning}, author={Wang, Siyuan and Liu, Zhongkun and Zhong, Wanjun and Zhou, Ming and Wei, Zhongyu and Chen, Zhumin and Duan, Nan}, journal={IEEE/ACM Transactions on Audio, Speech, and Language Processing}, year={2022}, publisher={IEEE} }
[ -0.50382399559021, -0.6589188575744629, 0.30116894841194153, 0.175467848777771, -0.19890575110912323, -0.23951786756515503, 0.03890351578593254, -0.466290682554245, 0.0702500194311142, 0.5052745938301086, -0.5328441262245178, -0.32301104068756104, -0.4340111315250397, -0.08153180778026581,...
null
null
null
null
null
null
null
null
null
null
null
null
null
bayartsogt/test_dlub_2023
bayartsogt
2023-06-30T05:53:46Z
132
3
null
[ "region:us" ]
2023-06-30T05:53:46Z
2023-06-29T03:54:24.000Z
2023-06-29T03:54:24
--- dataset_info: features: - name: question dtype: string - name: answer dtype: string splits: - name: train num_bytes: 6600 num_examples: 38 - name: test num_bytes: 7153 num_examples: 38 download_size: 0 dataset_size: 13753 --- # Dataset Card for "test_dlub_2023" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7308093905448914, -0.31869328022003174, 0.24495616555213928, 0.20935653150081635, -0.12112905085086823, 0.051546771079301834, 0.5208814740180969, -0.18904758989810944, 0.6056547164916992, 0.4855330288410187, -0.9898263216018677, -0.7041423320770264, -0.387789785861969, -0.22386868298053...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.2
open-llm-leaderboard
2023-10-28T11:30:38Z
132
1
null
[ "region:us" ]
2023-10-28T11:30:38Z
2023-10-10T19:14:44.000Z
2023-10-10T19:14:44
--- pretty_name: Evaluation run of ICBU-NPU/FashionGPT-70B-V1.2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ICBU-NPU/FashionGPT-70B-V1.2](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.2\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-28T11:30:26.266910](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.2/blob/main/results_2023-10-28T11-30-26.266910.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.007235738255033557,\n\ \ \"em_stderr\": 0.00086796885701786,\n \"f1\": 0.08897126677852359,\n\ \ \"f1_stderr\": 0.0016572567969813893,\n \"acc\": 0.5329529019437246,\n\ \ \"acc_stderr\": 0.011217384303167696\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.007235738255033557,\n \"em_stderr\": 0.00086796885701786,\n\ \ \"f1\": 0.08897126677852359,\n \"f1_stderr\": 0.0016572567969813893\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2403335860500379,\n \ \ \"acc_stderr\": 0.01176958070383695\n },\n \"harness|winogrande|5\":\ \ {\n \"acc\": 0.8255722178374112,\n \"acc_stderr\": 0.010665187902498444\n\ \ }\n}\n```" repo_url: https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|arc:challenge|25_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-10T19-14-20.366315.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_28T11_30_26.266910 path: - '**/details_harness|drop|3_2023-10-28T11-30-26.266910.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-28T11-30-26.266910.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_28T11_30_26.266910 path: - '**/details_harness|gsm8k|5_2023-10-28T11-30-26.266910.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-28T11-30-26.266910.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hellaswag|10_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_10T19_14_20.366315 
path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T19-14-20.366315.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T19-14-20.366315.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T19-14-20.366315.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T19-14-20.366315.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T19-14-20.366315.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-10T19-14-20.366315.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T19-14-20.366315.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-management|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-10T19-14-20.366315.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_10T19_14_20.366315 path: - '**/details_harness|truthfulqa:mc|0_2023-10-10T19-14-20.366315.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-10T19-14-20.366315.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_28T11_30_26.266910 path: - '**/details_harness|winogrande|5_2023-10-28T11-30-26.266910.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-28T11-30-26.266910.parquet' - config_name: results data_files: - split: 2023_10_10T19_14_20.366315 path: - results_2023-10-10T19-14-20.366315.parquet - split: 2023_10_28T11_30_26.266910 path: - results_2023-10-28T11-30-26.266910.parquet - split: latest path: - results_2023-10-28T11-30-26.266910.parquet --- # Dataset Card for Evaluation run of ICBU-NPU/FashionGPT-70B-V1.2 ## Dataset Description - 
**Homepage:** - **Repository:** https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [ICBU-NPU/FashionGPT-70B-V1.2](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-28T11:30:26.266910](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.2/blob/main/results_2023-10-28T11-30-26.266910.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.007235738255033557, "em_stderr": 0.00086796885701786, "f1": 0.08897126677852359, "f1_stderr": 0.0016572567969813893, "acc": 0.5329529019437246, "acc_stderr": 0.011217384303167696 }, "harness|drop|3": { "em": 0.007235738255033557, "em_stderr": 0.00086796885701786, "f1": 0.08897126677852359, "f1_stderr": 0.0016572567969813893 }, "harness|gsm8k|5": { "acc": 0.2403335860500379, "acc_stderr": 0.01176958070383695 }, "harness|winogrande|5": { "acc": 0.8255722178374112, "acc_stderr": 0.010665187902498444 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.4394247531890869, -0.6702339053153992, 0.12279921770095825, 0.28563690185546875, -0.27832645177841187, 0.18721787631511688, -0.3210861384868622, -0.23431982100009918, 0.3797749876976013, 0.4307623505592346, -0.7155715227127075, -0.8938931822776794, -0.650145411491394, 0.1368642151355743...
null
null
null
null
null
null
null
null
null
null
null
null
null
DeepLearner101/ImageNetSubset_16130352366404471562604770850950
DeepLearner101
2023-11-13T12:42:48Z
132
0
null
[ "region:us" ]
2023-11-13T12:42:48Z
2023-11-12T18:58:29.000Z
2023-11-12T18:58:29
--- dataset_info: features: - name: image dtype: image - name: label dtype: int64 splits: - name: train num_bytes: 65973972.0 num_examples: 1760 - name: validation num_bytes: 20185132.0 num_examples: 550 - name: test num_bytes: 15117832.0 num_examples: 352 download_size: 101216900 dataset_size: 101276936.0 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* --- # Dataset Card for "ImageNetSubset_16130352366404471562604770850950" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5885011553764343, 0.034214459359645844, 0.0061849565245211124, 0.3131588101387024, -0.5281749367713928, -0.23163676261901855, 0.34375566244125366, -0.11835725605487823, 0.892397403717041, 0.5996111631393433, -0.6561373472213745, -0.7232762575149536, -0.8345786333084106, -0.3369979262351...
null
null
null
null
null
null
null
null
null
null
null
null
null
teven/c4_15M
teven
2021-12-06T03:44:05Z
131
1
null
[ "region:us" ]
2021-12-06T03:44:05Z
2022-03-02T23:29:22.000Z
2022-03-02T23:29:22
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
fabiochiu/medium-articles
fabiochiu
2022-07-17T15:17:09Z
131
7
null
[ "license:mit", "region:us" ]
2022-07-17T15:17:09Z
2022-07-16T15:34:11.000Z
2022-07-16T15:34:11
--- license: mit --- # Data source This data has been collected through a standard scraping process from the [Medium website](https://medium.com/), looking for published articles. # Data description Each row in the data is a different article published on Medium. For each article, you have the following features: - **title** *[string]*: The title of the article. - **text** *[string]*: The text content of the article. - **url** *[string]*: The URL associated to the article. - **authors** *[list of strings]*: The article authors. - **timestamp** *[string]*: The publication datetime of the article. - **tags** *[list of strings]*: List of tags associated to the article. # Data analysis You can find a very quick data analysis in this [notebook](https://www.kaggle.com/code/fabiochiusano/medium-articles-simple-data-analysis). # What can I do with this data? - A multilabel classification model that assigns tags to articles. - A seq2seq model that generates article titles. - Text analysis. - Finetune text generation models on the general domain of Medium, or on specific domains by filtering articles by the appropriate tags. # Collection methodology Scraping has been done with Python and the requests library. Starting from a random article on Medium, the next articles to scrape are selected by visiting: 1. The author archive pages. 2. The publication archive pages (if present). 3. The tags archives (if present). The article HTML pages have been parsed with the [newspaper Python library](https://github.com/codelucas/newspaper). Published articles have been filtered for English articles only, using the Python [langdetect library](https://pypi.org/project/langdetect/). As a consequence of the collection methodology, the scraped articles are coming from a not uniform publication date distribution. 
This means that there are articles published in 2016 and in 2022, but the number of articles in this dataset published in 2016 is not the same as the number of articles published in 2022. In particular, there is a strong prevalence of articles published in 2020. Have a look at the [accompanying notebook](https://www.kaggle.com/code/fabiochiusano/medium-articles-simple-data-analysis) to see the distribution of the publication dates.
[ -0.30070242285728455, -0.7774046659469604, 0.5278884172439575, 0.22569313645362854, -0.263335257768631, 0.03180692344903946, -0.13661496341228485, -0.6390842199325562, 0.6180955171585083, 0.3242999315261841, -0.397912859916687, -0.5474873185157776, -0.41458430886268616, 0.3375656008720398,...
null
null
null
null
null
null
null
null
null
null
null
null
null
pyronear/openfire
pyronear
2022-12-11T22:25:43Z
131
2
null
[ "task_categories:image-classification", "annotations_creators:crowdsourced", "language_creators:crowdsourced", "size_categories:1K<n<10K", "source_datasets:original", "license:apache-2.0", "region:us" ]
2022-12-11T22:25:43Z
2022-07-17T16:11:22.000Z
2022-07-17T16:11:22
--- annotations_creators: - crowdsourced language_creators: - crowdsourced language: [] license: - apache-2.0 multilinguality: [] size_categories: - 1K<n<10K source_datasets: - original task_categories: - image-classification task_ids: [] pretty_name: Wildfire image classification dataset collected using images from web searches. --- # Dataset Card for OpenFire ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** https://pyronear.org/pyro-vision/datasets.html#openfire - **Repository:** https://github.com/pyronear/pyro-vision - **Point of Contact:** Pyronear <https://pyronear.org/en/> ### Dataset Summary OpenFire is an image classification dataset for wildfire detection, collected from web searches. ### Supported Tasks and Leaderboards - `image-classification`: The dataset can be used to train a model for Image Classification. 
### Languages English ## Dataset Structure ### Data Instances A data point comprises an image URL and its binary label. ``` { 'image_url': 'https://cdn-s-www.ledauphine.com/images/13C08274-6BA6-4577-B3A0-1E6C1B2A573C/FB1200/photo-1338240831.jpg', 'is_wildfire': true, } ``` ### Data Fields - `image_url`: the download URL of the image. - `is_wildfire`: a boolean value specifying whether there is an ongoing wildfire on the image. ### Data Splits The data is split into training and validation sets. The training set contains 7143 images and the validation set 792 images. ## Dataset Creation ### Curation Rationale The curators state that the current wildfire classification datasets typically contain close-up shots of wildfires, with limited variations of weather conditions, luminosity and backrgounds, making it difficult to assess for real world performance. They argue that the limitations of datasets have partially contributed to the failure of some algorithms in coping with sun flares, foggy / cloudy weather conditions and small scale. ### Source Data #### Initial Data Collection and Normalization OpenFire was collected using images publicly indexed by the search engine DuckDuckGo using multiple relevant queries. The images were then manually cleaned to remove errors. ### Annotations #### Annotation process Each web search query was designed to yield a single label (with wildfire or without), and additional human verification was used to remove errors. #### Who are the annotators? François-Guillaume Fernandez ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators François-Guillaume Fernandez ### Licensing Information [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0). 
### Citation Information ``` @software{Pyronear_PyroVision_2019, title={Pyrovision: wildfire early detection}, author={Pyronear contributors}, year={2019}, month={October}, publisher = {GitHub}, howpublished = {\url{https://github.com/pyronear/pyro-vision}} } ```
[ -0.15110652148723602, -0.24326471984386444, -0.10447286814451218, 0.3785229027271271, -0.13900987803936005, -0.27792152762413025, -0.3557240962982178, -0.2475811392068863, 0.009431933984160423, 0.4206072688102722, -0.6970270276069641, -0.9130090475082397, -0.3994120955467224, -0.1621199399...
null
null
null
null
null
null
null
null
null
null
null
null
null
truongpdd/vietnews-dataset
truongpdd
2022-09-09T04:54:20Z
131
2
null
[ "region:us" ]
2022-09-09T04:54:20Z
2022-09-09T04:06:45.000Z
2022-09-09T04:06:45
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
Freed-Wu/kodak
Freed-Wu
2022-11-19T05:43:53Z
131
0
null
[ "task_categories:other", "annotations_creators:no-annotation", "language_creators:found", "multilinguality:monolingual", "size_categories:n<1K", "source_datasets:original", "language:en", "license:gpl-3.0", "region:us" ]
2022-11-19T05:43:53Z
2022-11-19T05:43:53.000Z
2022-11-19T05:43:53
--- annotations_creators: - no-annotation language: - en language_creators: - found license: - gpl-3.0 multilinguality: - monolingual pretty_name: kodak size_categories: - n<1K source_datasets: - original tags: [] task_categories: - other task_ids: [] dataset_info: features: - name: image dtype: image splits: - name: test num_bytes: 15072 num_examples: 24 download_size: 15072 dataset_size: 15072 --- # Dataset Card for kodak ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Instances](#data-instances) - [Data Fields](#data-fields) - [Data Splits](#data-splits) - [Dataset Creation](#dataset-creation) - [Curation Rationale](#curation-rationale) - [Source Data](#source-data) - [Annotations](#annotations) - [Personal and Sensitive Information](#personal-and-sensitive-information) - [Considerations for Using the Data](#considerations-for-using-the-data) - [Social Impact of Dataset](#social-impact-of-dataset) - [Discussion of Biases](#discussion-of-biases) - [Other Known Limitations](#other-known-limitations) - [Additional Information](#additional-information) - [Dataset Curators](#dataset-curators) - [Licensing Information](#licensing-information) - [Citation Information](#citation-information) - [Contributions](#contributions) ## Dataset Description - **Homepage:** <https://r0k.us/graphics/kodak/> - **Repository:** <https://github.com/MohamedBakrAli/Kodak-Lossless-True-Color-Image-Suite> - **Paper:** - **Leaderboard:** - **Point of Contact:** ### Dataset Summary The pictures below link to lossless, true color (24 bits per pixel, aka "full color") images. It is my understanding they have been released by the Eastman Kodak Company for unrestricted usage. Many sites use them as a standard test suite for compression testing, etc. 
Prior to this site, they were only available in the Sun Raster format via ftp. This meant that the images could not be previewed before downloading. Since their release, however, the lossless PNG format has been incorporated into all the major browsers. Since PNG supports 24-bit lossless color (which GIF and JPEG do not), it is now possible to offer this browser-friendly access to the images. ### Supported Tasks and Leaderboards - Image compression ### Languages - en ## Dataset Structure ### Data Instances - [![kodak01](https://r0k.us/graphics/kodak/thumbs/kodim01t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim01.png) - [![kodak02](https://r0k.us/graphics/kodak/thumbs/kodim02t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim02.png) - [![kodak03](https://r0k.us/graphics/kodak/thumbs/kodim03t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim03.png) - [![kodak04](https://r0k.us/graphics/kodak/thumbs/kodim04t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim04.png) - [![kodak05](https://r0k.us/graphics/kodak/thumbs/kodim05t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim05.png) - [![kodak06](https://r0k.us/graphics/kodak/thumbs/kodim06t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim06.png) - [![kodak07](https://r0k.us/graphics/kodak/thumbs/kodim07t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim07.png) - [![kodak08](https://r0k.us/graphics/kodak/thumbs/kodim08t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim08.png) - [![kodak09](https://r0k.us/graphics/kodak/thumbs/kodim09t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim09.png) - [![kodak10](https://r0k.us/graphics/kodak/thumbs/kodim10t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim10.png) - [![kodak11](https://r0k.us/graphics/kodak/thumbs/kodim11t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim11.png) - [![kodak12](https://r0k.us/graphics/kodak/thumbs/kodim12t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim12.png) - 
[![kodak13](https://r0k.us/graphics/kodak/thumbs/kodim13t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim13.png) - [![kodak14](https://r0k.us/graphics/kodak/thumbs/kodim14t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim14.png) - [![kodak15](https://r0k.us/graphics/kodak/thumbs/kodim15t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim15.png) - [![kodak16](https://r0k.us/graphics/kodak/thumbs/kodim16t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim16.png) - [![kodak17](https://r0k.us/graphics/kodak/thumbs/kodim17t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim17.png) - [![kodak18](https://r0k.us/graphics/kodak/thumbs/kodim18t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim18.png) - [![kodak19](https://r0k.us/graphics/kodak/thumbs/kodim19t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim19.png) - [![kodak20](https://r0k.us/graphics/kodak/thumbs/kodim20t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim20.png) - [![kodak21](https://r0k.us/graphics/kodak/thumbs/kodim21t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim21.png) - [![kodak22](https://r0k.us/graphics/kodak/thumbs/kodim22t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim22.png) - [![kodak23](https://r0k.us/graphics/kodak/thumbs/kodim23t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim23.png) - [![kodak24](https://r0k.us/graphics/kodak/thumbs/kodim24t.jpg)](https://r0k.us/graphics/kodak/kodak/kodim24.png) ### Data Fields ### Data Splits ## Dataset Creation ### Curation Rationale ### Source Data #### Initial Data Collection and Normalization #### Who are the source language producers? <https://www.kodak.com> ### Annotations #### Annotation process #### Who are the annotators? 
### Personal and Sensitive Information ## Considerations for Using the Data ### Social Impact of Dataset ### Discussion of Biases ### Other Known Limitations ## Additional Information ### Dataset Curators ### Licensing Information [LICENSE](LICENSE) ### Citation Information ### Contributions Thanks to [@Freed-Wu](https://github.com/Freed-Wu) for adding this dataset.
[ -0.7813981175422668, 0.023115715011954308, 0.58560711145401, -0.1447632610797882, -0.4813326895236969, 0.011588474735617638, -0.05838112533092499, -0.6627343893051147, 0.648419201374054, 0.44216927886009216, -0.7412939071655273, -0.7173404097557068, -0.7333112955093384, 0.28038427233695984...
null
null
null
null
null
null
null
null
null
null
null
null
null
pszemraj/SQuALITY-v1.3
pszemraj
2023-02-27T08:42:44Z
131
0
null
[ "task_categories:summarization", "task_categories:text2text-generation", "size_categories:n<1K", "language:en", "license:apache-2.0", "summarization", "long-document", "arxiv:2205.11465", "region:us" ]
2023-02-27T08:42:44Z
2023-02-27T08:25:50.000Z
2023-02-27T08:25:50
--- license: apache-2.0 language: - en task_categories: - summarization - text2text-generation tags: - summarization - long-document pretty_name: SQuALITY v1.3 size_categories: - n<1K --- # SQuALITY - v1.3 > Original paper [here](https://arxiv.org/abs/2205.11465) This is v1.3, the 'text' edition `.jsonl` files. See description from the [original repo](https://github.com/nyu-mll/SQuALITY): > v1.3 fixes some bugs in v1.2. In v1.2, 10 out of 127 articles (each ~5k-word-long) are missing a few hundreds words each, so summaries may not be fully contained in the article. To fix this issue, we have updated the 10 articles. ## contents > again, this is taken from the repo Each data file ({train/dev/test}.jsonl) is formatted as a JSON lines file. Each row in the data file is a JSON dictionary with the following fields: - metadata: the Gutenberg story ID, an internal UID, and the Project Gutenberg license - document: the Gutenberg story questions: a list of questions and accompanying responses - question text - question number: the order in which that question was answered by the writers - responses: list of worker's response, where each response is a dictionary containing the (anonymized) worker ID, an internal UID, and their response to the question ### dataset contents ```python DatasetDict({ train: Dataset({ features: ['metadata', 'document', 'questions'], num_rows: 50 }) test: Dataset({ features: ['metadata', 'document', 'questions'], num_rows: 52 }) validation: Dataset({ features: ['metadata', 'document', 'questions'], num_rows: 25 }) }) ```
[ -0.35225772857666016, -0.6885549426078796, 0.37787380814552307, 0.5335370302200317, -0.058993034064769745, -0.28384119272232056, -0.1577632874250412, -0.5698612332344055, 0.08061547577381134, 0.3865903913974762, -0.5279310941696167, -0.38197416067123413, -0.3982541263103485, 0.445786893367...
null
null
null
null
null
null
null
null
null
null
null
null
null
shibing624/alpaca-zh
shibing624
2023-05-10T06:09:06Z
131
47
null
[ "task_categories:text-generation", "size_categories:10K<n<100K", "language:zh", "license:cc-by-4.0", "gpt", "alpaca", "fine-tune", "instruct-tune", "instruction", "arxiv:2304.03277", "region:us" ]
2023-05-10T06:09:06Z
2023-03-25T11:37:25.000Z
2023-03-25T11:37:25
--- dataset_info: features: - name: instruction dtype: string - name: input dtype: string - name: output dtype: string splits: - name: train num_bytes: 32150579 num_examples: 48818 download_size: 35100559 dataset_size: 32150579 license: cc-by-4.0 language: - zh pretty_name: Instruction Tuning with GPT-4 size_categories: - 10K<n<100K task_categories: - text-generation tags: - gpt - alpaca - fine-tune - instruct-tune - instruction --- # Dataset Description - **Project Page:** https://instruction-tuning-with-gpt-4.github.io - **Repo:** https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM - **Paper:** https://arxiv.org/abs/2304.03277 # Dataset Card for "alpaca-zh" 本数据集是参考Alpaca方法基于GPT4得到的self-instruct数据,约5万条。 Dataset from https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM It is the chinese dataset from https://github.com/Instruction-Tuning-with-GPT-4/GPT-4-LLM/blob/main/data/alpaca_gpt4_data_zh.json # Usage and License Notices The data is intended and licensed for research use only. The dataset is CC BY NC 4.0 (allowing only non-commercial use) and models trained using the dataset should not be used outside of research purposes. train model with alpaca-zh dataset: https://github.com/shibing624/textgen # English Dataset [Found here](https://huggingface.co/datasets/c-s-ale/alpaca-gpt4-data) # Citation ``` @article{peng2023gpt4llm, title={Instruction Tuning with GPT-4}, author={Baolin Peng, Chunyuan Li, Pengcheng He, Michel Galley, Jianfeng Gao}, journal={arXiv preprint arXiv:2304.03277}, year={2023} } ```
[ -0.33568066358566284, -0.6570315957069397, 0.39126458764076233, 0.24592578411102295, -0.6408465504646301, -0.4142588675022125, -0.14586175978183746, -0.48949819803237915, 0.04026072472333908, 0.30570557713508606, -0.8283568024635315, -0.8338583111763, -0.549497663974762, 0.0420289635658264...
null
null
null
null
null
null
null
null
null
null
null
null
null
distil-whisper/common_voice_13_0
distil-whisper
2023-09-25T10:30:13Z
131
1
null
[ "task_categories:automatic-speech-recognition", "language:en", "license:cc0-1.0", "region:us" ]
2023-09-25T10:30:13Z
2023-04-17T16:51:15.000Z
2023-04-17T16:51:15
--- license: cc0-1.0 task_categories: - automatic-speech-recognition language: - en -pretty_name: Common Voice 13 --- # Distil Whisper: Common Voice 13 This is a variant of the [Common Voice 13](https://huggingface.co/datasets/mozilla_foundation/common_voice_13) dataset, augmented to return the pseudo-labelled Whisper Transcriptions alongside the original dataset elements. The pseudo-labelled transcriptions were generated by labelling the input audio data with the Whisper [large-v2](https://huggingface.co/openai/whisper-large-v2) model with *greedy* sampling. For information on how the original dataset was curated, refer to the original [dataset card](https://huggingface.co/datasets/mozilla_foundation/common_voice_13). ## Standalone Usage First, install the latest version of the 🤗 Datasets package: ```bash pip install --upgrade pip pip install --upgrade datasets[audio] ``` The dataset can be downloaded and pre-processed on disk using the [`load_dataset`](https://huggingface.co/docs/datasets/v2.14.5/en/package_reference/loading_methods#datasets.load_dataset) function: ```python from datasets import load_dataset dataset = load_dataset("distil-whisper/common_voice_13_0", "en") # take the first sample of the validation set sample = dataset["validation"][0] ``` It can also be streamed directly from the Hub using Datasets' [streaming mode](https://huggingface.co/blog/audio-datasets#streaming-mode-the-silver-bullet). Loading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire dataset to disk: ```python from datasets import load_dataset dataset = load_dataset("distil-whisper/common_voice_13_0", "en", streaming=True) # take the first sample of the validation set sample = next(iter(dataset["validation"])) ``` ## Distil Whisper Usage To use this dataset to reproduce a Distil Whisper training run, refer to the instructions on the [Distil Whisper repository](https://github.com/huggingface/distil-whisper#training). 
## License This dataset is licensed under cc0-1.0.
[ -0.26834267377853394, -0.5749202966690063, 0.09994259476661682, 0.5656177401542664, -0.20969432592391968, 0.07162373512983322, -0.18821702897548676, -0.23841822147369385, 0.4191300868988037, 0.35731643438339233, -0.9785483479499817, -0.41534265875816345, -0.5774230360984802, 0.187987908720...
null
null
null
null
null
null
null
null
null
null
null
null
null
lansinuote/diffusion.8.instruct_pix2pix
lansinuote
2023-05-16T10:00:52Z
131
0
null
[ "region:us" ]
2023-05-16T10:00:52Z
2023-05-16T08:18:58.000Z
2023-05-16T08:18:58
--- dataset_info: features: - name: input dtype: image - name: text dtype: string - name: output dtype: image splits: - name: train num_bytes: 416880509.0 num_examples: 1000 download_size: 416898966 dataset_size: 416880509.0 --- # Dataset Card for "diffusion.8.instruct_pix2pix" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.628951907157898, -0.4810026288032532, 0.5212926268577576, 0.35944661498069763, -0.029466770589351654, -0.20474427938461304, 0.2698279917240143, 0.0708267018198967, 0.5606880784034729, 0.31377744674682617, -0.6076655983924866, -0.5989950299263, -0.8238043189048767, -0.6884796619415283, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_Riiid__sheep-duck-llama-2
open-llm-leaderboard
2023-09-19T02:43:00Z
131
0
null
[ "region:us" ]
2023-09-19T02:43:00Z
2023-09-12T04:15:36.000Z
2023-09-12T04:15:36
--- pretty_name: Evaluation run of Riiid/sheep-duck-llama-2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Riiid/sheep-duck-llama-2](https://huggingface.co/Riiid/sheep-duck-llama-2) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Riiid__sheep-duck-llama-2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-19T02:41:38.567550](https://huggingface.co/datasets/open-llm-leaderboard/details_Riiid__sheep-duck-llama-2/blob/main/results_2023-09-19T02-41-38.567550.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7074787526637408,\n\ \ \"acc_stderr\": 0.030842770794867788,\n \"acc_norm\": 0.7112713043078007,\n\ \ \"acc_norm_stderr\": 0.03081173438001915,\n \"mc1\": 0.4663402692778458,\n\ \ \"mc1_stderr\": 0.017463793867168103,\n \"mc2\": 0.6379733867215786,\n\ \ \"mc2_stderr\": 0.014804542452694204\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6851535836177475,\n \"acc_stderr\": 0.013572657703084948,\n\ \ \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059376\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6915952997410875,\n\ \ \"acc_stderr\": 0.0046089078729577085,\n \"acc_norm\": 0.8778131846245768,\n\ \ \"acc_norm_stderr\": 0.003268321260913631\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n\ \ \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.6296296296296297,\n\ \ \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8026315789473685,\n \"acc_stderr\": 0.03238981601699397,\n\ \ \"acc_norm\": 0.8026315789473685,\n \"acc_norm_stderr\": 0.03238981601699397\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n\ \ \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.74,\n \ \ \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7471698113207547,\n \"acc_stderr\": 0.02674989977124121,\n\ \ \"acc_norm\": 0.7471698113207547,\n \"acc_norm_stderr\": 0.02674989977124121\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n\ \ \"acc_stderr\": 0.03216600808802267,\n \"acc_norm\": 0.8194444444444444,\n\ \ \"acc_norm_stderr\": 0.03216600808802267\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \ \ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n\ \ \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n\ \ \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n\ \ \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.77,\n \"acc_stderr\": 0.04229525846816507,\n \"acc_norm\": 0.77,\n\ \ \"acc_norm_stderr\": 0.04229525846816507\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6851063829787234,\n \"acc_stderr\": 0.03036358219723817,\n\ \ \"acc_norm\": 0.6851063829787234,\n \"acc_norm_stderr\": 0.03036358219723817\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\ \ \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n\ \ \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.03996629574876719,\n\ \ \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.03996629574876719\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.4708994708994709,\n \"acc_stderr\": 0.02570765861415495,\n \"\ acc_norm\": 0.4708994708994709,\n 
\"acc_norm_stderr\": 0.02570765861415495\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n\ \ \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n\ \ \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \ \ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8225806451612904,\n\ \ \"acc_stderr\": 0.021732540689329286,\n \"acc_norm\": 0.8225806451612904,\n\ \ \"acc_norm_stderr\": 0.021732540689329286\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5665024630541872,\n \"acc_stderr\": 0.034867317274198714,\n\ \ \"acc_norm\": 0.5665024630541872,\n \"acc_norm_stderr\": 0.034867317274198714\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.027998073798781678,\n\ \ \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.027998073798781678\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8888888888888888,\n \"acc_stderr\": 0.022390787638216763,\n \"\ acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.022390787638216763\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9378238341968912,\n \"acc_stderr\": 0.017426974154240528,\n\ \ \"acc_norm\": 0.9378238341968912,\n \"acc_norm_stderr\": 0.017426974154240528\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7102564102564103,\n \"acc_stderr\": 0.023000628243687968,\n\ \ \"acc_norm\": 0.7102564102564103,\n \"acc_norm_stderr\": 0.023000628243687968\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.32222222222222224,\n \"acc_stderr\": 0.0284934650910286,\n \ \ \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.0284934650910286\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7773109243697479,\n \"acc_stderr\": 0.027025433498882385,\n\ \ \"acc_norm\": 0.7773109243697479,\n \"acc_norm_stderr\": 0.027025433498882385\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.47019867549668876,\n \"acc_stderr\": 0.040752249922169775,\n \"\ acc_norm\": 0.47019867549668876,\n \"acc_norm_stderr\": 0.040752249922169775\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.9009174311926605,\n \"acc_stderr\": 0.01280978008187893,\n \"\ acc_norm\": 0.9009174311926605,\n \"acc_norm_stderr\": 0.01280978008187893\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5972222222222222,\n \"acc_stderr\": 0.03344887382997865,\n \"\ acc_norm\": 0.5972222222222222,\n \"acc_norm_stderr\": 0.03344887382997865\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.9313725490196079,\n \"acc_stderr\": 0.017744453647073315,\n \"\ acc_norm\": 0.9313725490196079,\n \"acc_norm_stderr\": 0.017744453647073315\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.9029535864978903,\n \"acc_stderr\": 0.019269323025640255,\n \ \ \"acc_norm\": 0.9029535864978903,\n \"acc_norm_stderr\": 0.019269323025640255\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7982062780269058,\n\ \ \"acc_stderr\": 0.026936111912802273,\n \"acc_norm\": 0.7982062780269058,\n\ \ \"acc_norm_stderr\": 0.026936111912802273\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515368,\n\ \ \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515368\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8760330578512396,\n \"acc_stderr\": 0.030083098716035206,\n \"\ acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035206\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8282208588957055,\n \"acc_stderr\": 0.029634717272371037,\n\ \ \"acc_norm\": 0.8282208588957055,\n \"acc_norm_stderr\": 0.029634717272371037\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5446428571428571,\n\ \ \"acc_stderr\": 0.04726835553719098,\n \"acc_norm\": 0.5446428571428571,\n\ \ \"acc_norm_stderr\": 0.04726835553719098\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n\ \ \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9145299145299145,\n\ \ \"acc_stderr\": 0.01831589168562585,\n \"acc_norm\": 0.9145299145299145,\n\ \ \"acc_norm_stderr\": 0.01831589168562585\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8697318007662835,\n\ \ \"acc_stderr\": 0.012036729568216055,\n \"acc_norm\": 0.8697318007662835,\n\ \ \"acc_norm_stderr\": 0.012036729568216055\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7832369942196532,\n \"acc_stderr\": 0.022183477668412856,\n\ \ \"acc_norm\": 0.7832369942196532,\n \"acc_norm_stderr\": 0.022183477668412856\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6245810055865921,\n\ \ \"acc_stderr\": 0.01619510424846353,\n \"acc_norm\": 
0.6245810055865921,\n\ \ \"acc_norm_stderr\": 0.01619510424846353\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.02463004897982477,\n\ \ \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.02463004897982477\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7909967845659164,\n\ \ \"acc_stderr\": 0.02309314039837422,\n \"acc_norm\": 0.7909967845659164,\n\ \ \"acc_norm_stderr\": 0.02309314039837422\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8425925925925926,\n \"acc_stderr\": 0.020263764996385717,\n\ \ \"acc_norm\": 0.8425925925925926,\n \"acc_norm_stderr\": 0.020263764996385717\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5780141843971631,\n \"acc_stderr\": 0.029462189233370593,\n \ \ \"acc_norm\": 0.5780141843971631,\n \"acc_norm_stderr\": 0.029462189233370593\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5867014341590613,\n\ \ \"acc_stderr\": 0.012576779494860076,\n \"acc_norm\": 0.5867014341590613,\n\ \ \"acc_norm_stderr\": 0.012576779494860076\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.02667925227010314,\n\ \ \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.02667925227010314\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7696078431372549,\n \"acc_stderr\": 0.01703522925803403,\n \ \ \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.01703522925803403\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7545454545454545,\n\ \ \"acc_stderr\": 0.041220665028782855,\n \"acc_norm\": 0.7545454545454545,\n\ \ \"acc_norm_stderr\": 0.041220665028782855\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7959183673469388,\n \"acc_stderr\": 0.025801283475090496,\n\ \ \"acc_norm\": 0.7959183673469388,\n \"acc_norm_stderr\": 0.025801283475090496\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n\ \ \"acc_stderr\": 0.023729830881018533,\n \"acc_norm\": 0.8706467661691543,\n\ \ \"acc_norm_stderr\": 0.023729830881018533\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n\ \ \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n\ \ \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015575,\n\ \ \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015575\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4663402692778458,\n\ \ \"mc1_stderr\": 0.017463793867168103,\n \"mc2\": 0.6379733867215786,\n\ \ \"mc2_stderr\": 0.014804542452694204\n }\n}\n```" repo_url: https://huggingface.co/Riiid/sheep-duck-llama-2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|arc:challenge|25_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|arc:challenge|25_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hellaswag|10_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hellaswag|10_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T04-15-20.917267.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T04-15-20.917267.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-12T04-15-20.917267.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-computer_security|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T02-41-38.567550.parquet' - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T02-41-38.567550.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 
2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T02-41-38.567550.parquet' - config_name: 
harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T02-41-38.567550.parquet' - config_name: 
harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T02-41-38.567550.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-management|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T04-15-20.917267.parquet' - split: 
2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T02-41-38.567550.parquet' - config_name: 
harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T02-41-38.567550.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_12T04_15_20.917267 path: - '**/details_harness|truthfulqa:mc|0_2023-09-12T04-15-20.917267.parquet' - split: 2023_09_19T02_41_38.567550 path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T02-41-38.567550.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T02-41-38.567550.parquet' - config_name: results data_files: - split: 2023_09_12T04_15_20.917267 path: - results_2023-09-12T04-15-20.917267.parquet - split: 2023_09_19T02_41_38.567550 path: - results_2023-09-19T02-41-38.567550.parquet - split: latest path: - results_2023-09-19T02-41-38.567550.parquet --- # Dataset Card for Evaluation run of Riiid/sheep-duck-llama-2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Riiid/sheep-duck-llama-2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [Riiid/sheep-duck-llama-2](https://huggingface.co/Riiid/sheep-duck-llama-2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. 
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Riiid__sheep-duck-llama-2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-19T02:41:38.567550](https://huggingface.co/datasets/open-llm-leaderboard/details_Riiid__sheep-duck-llama-2/blob/main/results_2023-09-19T02-41-38.567550.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7074787526637408, "acc_stderr": 0.030842770794867788, "acc_norm": 0.7112713043078007, "acc_norm_stderr": 0.03081173438001915, "mc1": 0.4663402692778458, "mc1_stderr": 0.017463793867168103, "mc2": 0.6379733867215786, "mc2_stderr": 0.014804542452694204 }, "harness|arc:challenge|25": { "acc": 0.6851535836177475, "acc_stderr": 0.013572657703084948, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059376 }, "harness|hellaswag|10": { "acc": 0.6915952997410875, "acc_stderr": 0.0046089078729577085, "acc_norm": 0.8778131846245768, "acc_norm_stderr": 0.003268321260913631 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04171654161354543, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8026315789473685, "acc_stderr": 0.03238981601699397, "acc_norm": 0.8026315789473685, "acc_norm_stderr": 0.03238981601699397 }, 
"harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768081, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7471698113207547, "acc_stderr": 0.02674989977124121, "acc_norm": 0.7471698113207547, "acc_norm_stderr": 0.02674989977124121 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.03216600808802267, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.03216600808802267 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816507, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816507 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6851063829787234, "acc_stderr": 0.03036358219723817, "acc_norm": 0.6851063829787234, "acc_norm_stderr": 0.03036358219723817 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.04692008381368909, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.04692008381368909 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.03996629574876719, 
"acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.03996629574876719 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4708994708994709, "acc_stderr": 0.02570765861415495, "acc_norm": 0.4708994708994709, "acc_norm_stderr": 0.02570765861415495 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8225806451612904, "acc_stderr": 0.021732540689329286, "acc_norm": 0.8225806451612904, "acc_norm_stderr": 0.021732540689329286 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5665024630541872, "acc_stderr": 0.034867317274198714, "acc_norm": 0.5665024630541872, "acc_norm_stderr": 0.034867317274198714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8484848484848485, "acc_stderr": 0.027998073798781678, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.027998073798781678 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.022390787638216763, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.022390787638216763 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9378238341968912, "acc_stderr": 0.017426974154240528, "acc_norm": 0.9378238341968912, "acc_norm_stderr": 0.017426974154240528 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7102564102564103, "acc_stderr": 0.023000628243687968, "acc_norm": 0.7102564102564103, "acc_norm_stderr": 0.023000628243687968 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, 
"acc_stderr": 0.0284934650910286, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.0284934650910286 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7773109243697479, "acc_stderr": 0.027025433498882385, "acc_norm": 0.7773109243697479, "acc_norm_stderr": 0.027025433498882385 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.47019867549668876, "acc_stderr": 0.040752249922169775, "acc_norm": 0.47019867549668876, "acc_norm_stderr": 0.040752249922169775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9009174311926605, "acc_stderr": 0.01280978008187893, "acc_norm": 0.9009174311926605, "acc_norm_stderr": 0.01280978008187893 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5972222222222222, "acc_stderr": 0.03344887382997865, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.03344887382997865 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9313725490196079, "acc_stderr": 0.017744453647073315, "acc_norm": 0.9313725490196079, "acc_norm_stderr": 0.017744453647073315 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.9029535864978903, "acc_stderr": 0.019269323025640255, "acc_norm": 0.9029535864978903, "acc_norm_stderr": 0.019269323025640255 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7982062780269058, "acc_stderr": 0.026936111912802273, "acc_norm": 0.7982062780269058, "acc_norm_stderr": 0.026936111912802273 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8549618320610687, "acc_stderr": 0.030884661089515368, "acc_norm": 0.8549618320610687, "acc_norm_stderr": 0.030884661089515368 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035206, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035206 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, 
"harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8282208588957055, "acc_stderr": 0.029634717272371037, "acc_norm": 0.8282208588957055, "acc_norm_stderr": 0.029634717272371037 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5446428571428571, "acc_stderr": 0.04726835553719098, "acc_norm": 0.5446428571428571, "acc_norm_stderr": 0.04726835553719098 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9145299145299145, "acc_stderr": 0.01831589168562585, "acc_norm": 0.9145299145299145, "acc_norm_stderr": 0.01831589168562585 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8697318007662835, "acc_stderr": 0.012036729568216055, "acc_norm": 0.8697318007662835, "acc_norm_stderr": 0.012036729568216055 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7832369942196532, "acc_stderr": 0.022183477668412856, "acc_norm": 0.7832369942196532, "acc_norm_stderr": 0.022183477668412856 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6245810055865921, "acc_stderr": 0.01619510424846353, "acc_norm": 0.6245810055865921, "acc_norm_stderr": 0.01619510424846353 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.02463004897982477, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.02463004897982477 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7909967845659164, "acc_stderr": 0.02309314039837422, "acc_norm": 0.7909967845659164, "acc_norm_stderr": 0.02309314039837422 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8425925925925926, "acc_stderr": 0.020263764996385717, "acc_norm": 0.8425925925925926, "acc_norm_stderr": 0.020263764996385717 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.5780141843971631, "acc_stderr": 0.029462189233370593, "acc_norm": 0.5780141843971631, "acc_norm_stderr": 0.029462189233370593 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5867014341590613, "acc_stderr": 0.012576779494860076, "acc_norm": 0.5867014341590613, "acc_norm_stderr": 0.012576779494860076 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.02667925227010314, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.02667925227010314 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7696078431372549, "acc_stderr": 0.01703522925803403, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.01703522925803403 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7545454545454545, "acc_stderr": 0.041220665028782855, "acc_norm": 0.7545454545454545, "acc_norm_stderr": 0.041220665028782855 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7959183673469388, "acc_stderr": 0.025801283475090496, "acc_norm": 0.7959183673469388, "acc_norm_stderr": 0.025801283475090496 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.023729830881018533, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.023729830881018533 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015575, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015575 }, "harness|truthfulqa:mc|0": { "mc1": 0.4663402692778458, "mc1_stderr": 0.017463793867168103, "mc2": 0.6379733867215786, "mc2_stderr": 0.014804542452694204 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More 
Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7317215204238892, -0.8556883335113525, 0.2614770829677582, 0.257135272026062, -0.15711767971515656, -0.031170610338449478, 0.026342131197452545, -0.2442331612110138, 0.5517392158508301, -0.04183107241988182, -0.4770331382751465, -0.6807136535644531, -0.4660034477710724, 0.24079254269599...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_AIDC-ai-business__Marcoroni-70B-v1
open-llm-leaderboard
2023-09-22T18:17:15Z
131
0
null
[ "region:us" ]
2023-09-22T18:17:15Z
2023-09-22T18:16:15.000Z
2023-09-22T18:16:15
--- pretty_name: Evaluation run of AIDC-ai-business/Marcoroni-70B-v1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [AIDC-ai-business/Marcoroni-70B-v1](https://huggingface.co/AIDC-ai-business/Marcoroni-70B-v1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AIDC-ai-business__Marcoroni-70B-v1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-22T18:15:51.325850](https://huggingface.co/datasets/open-llm-leaderboard/details_AIDC-ai-business__Marcoroni-70B-v1/blob/main/results_2023-09-22T18-15-51.325850.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7060854021313192,\n\ \ \"acc_stderr\": 0.030933452595170018,\n \"acc_norm\": 0.7100920024195952,\n\ \ \"acc_norm_stderr\": 0.030899690738616904,\n \"mc1\": 0.46878824969400246,\n\ \ \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6440529032814905,\n\ \ \"mc2_stderr\": 0.014798378745240535\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6877133105802048,\n \"acc_stderr\": 0.013542598541688067,\n\ \ \"acc_norm\": 0.735494880546075,\n \"acc_norm_stderr\": 0.01288927294931337\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6876120294761999,\n\ \ \"acc_stderr\": 0.004625198756710245,\n \"acc_norm\": 0.8762198765186218,\n\ \ \"acc_norm_stderr\": 0.0032865748124511866\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n\ \ \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n\ \ \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8157894736842105,\n \"acc_stderr\": 0.031546980450822305,\n\ \ \"acc_norm\": 0.8157894736842105,\n \"acc_norm_stderr\": 0.031546980450822305\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7471698113207547,\n \"acc_stderr\": 0.026749899771241214,\n\ \ \"acc_norm\": 0.7471698113207547,\n \"acc_norm_stderr\": 0.026749899771241214\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n\ \ \"acc_stderr\": 0.03216600808802267,\n \"acc_norm\": 0.8194444444444444,\n\ \ \"acc_norm_stderr\": 
0.03216600808802267\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.45,\n \"acc_stderr\": 0.04999999999999999,\n \ \ \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.04999999999999999\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7052023121387283,\n\ \ \"acc_stderr\": 0.03476599607516478,\n \"acc_norm\": 0.7052023121387283,\n\ \ \"acc_norm_stderr\": 0.03476599607516478\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.73,\n\ \ \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.7106382978723405,\n \"acc_stderr\": 0.02964400657700962,\n\ \ \"acc_norm\": 0.7106382978723405,\n \"acc_norm_stderr\": 0.02964400657700962\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\ \ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\ \ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n\ \ \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.46825396825396826,\n \"acc_stderr\": 0.0256993528321318,\n \"\ acc_norm\": 
0.46825396825396826,\n \"acc_norm_stderr\": 0.0256993528321318\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.49206349206349204,\n\ \ \"acc_stderr\": 0.044715725362943486,\n \"acc_norm\": 0.49206349206349204,\n\ \ \"acc_norm_stderr\": 0.044715725362943486\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \ \ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8290322580645161,\n\ \ \"acc_stderr\": 0.021417242936321586,\n \"acc_norm\": 0.8290322580645161,\n\ \ \"acc_norm_stderr\": 0.021417242936321586\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5566502463054187,\n \"acc_stderr\": 0.03495334582162933,\n\ \ \"acc_norm\": 0.5566502463054187,\n \"acc_norm_stderr\": 0.03495334582162933\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403325,\n \"acc_norm\"\ : 0.79,\n \"acc_norm_stderr\": 0.04093601807403325\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.028450388805284343,\n\ \ \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.028450388805284343\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8888888888888888,\n \"acc_stderr\": 0.02239078763821677,\n \"\ acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.02239078763821677\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9378238341968912,\n \"acc_stderr\": 0.017426974154240528,\n\ \ \"acc_norm\": 0.9378238341968912,\n \"acc_norm_stderr\": 0.017426974154240528\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7307692307692307,\n \"acc_stderr\": 0.022489389793654817,\n\ \ \"acc_norm\": 0.7307692307692307,\n \"acc_norm_stderr\": 0.022489389793654817\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606647,\n \ \ \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606647\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7857142857142857,\n \"acc_stderr\": 0.02665353159671549,\n \ \ \"acc_norm\": 0.7857142857142857,\n \"acc_norm_stderr\": 0.02665353159671549\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.4900662251655629,\n \"acc_stderr\": 0.04081677107248436,\n \"\ acc_norm\": 0.4900662251655629,\n \"acc_norm_stderr\": 0.04081677107248436\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8972477064220183,\n \"acc_stderr\": 0.013018246509173768,\n \"\ acc_norm\": 0.8972477064220183,\n \"acc_norm_stderr\": 0.013018246509173768\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5925925925925926,\n \"acc_stderr\": 0.033509916046960436,\n \"\ acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.033509916046960436\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658928,\n \"\ acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658928\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8945147679324894,\n \"acc_stderr\": 0.01999556072375854,\n \ \ \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.01999556072375854\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n\ \ \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n\ \ \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515368,\n\ \ \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515368\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8677685950413223,\n \"acc_stderr\": 0.030922788320445784,\n \"\ acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.030922788320445784\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8343558282208589,\n \"acc_stderr\": 0.029208296231259104,\n\ \ \"acc_norm\": 0.8343558282208589,\n \"acc_norm_stderr\": 0.029208296231259104\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5892857142857143,\n\ \ \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.5892857142857143,\n\ \ \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n\ \ \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8974358974358975,\n\ \ \"acc_stderr\": 0.019875655027867454,\n \"acc_norm\": 0.8974358974358975,\n\ \ \"acc_norm_stderr\": 0.019875655027867454\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542126,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542126\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8684546615581098,\n\ \ \"acc_stderr\": 0.01208670521425043,\n \"acc_norm\": 0.8684546615581098,\n\ \ \"acc_norm_stderr\": 0.01208670521425043\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7687861271676301,\n \"acc_stderr\": 0.022698657167855713,\n\ \ \"acc_norm\": 0.7687861271676301,\n \"acc_norm_stderr\": 0.022698657167855713\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6681564245810055,\n\ \ \"acc_stderr\": 0.015748421208187303,\n \"acc_norm\": 
0.6681564245810055,\n\ \ \"acc_norm_stderr\": 0.015748421208187303\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n\ \ \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7684887459807074,\n\ \ \"acc_stderr\": 0.023956532766639133,\n \"acc_norm\": 0.7684887459807074,\n\ \ \"acc_norm_stderr\": 0.023956532766639133\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8179012345679012,\n \"acc_stderr\": 0.021473491834808334,\n\ \ \"acc_norm\": 0.8179012345679012,\n \"acc_norm_stderr\": 0.021473491834808334\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5638297872340425,\n \"acc_stderr\": 0.029583452036284076,\n \ \ \"acc_norm\": 0.5638297872340425,\n \"acc_norm_stderr\": 0.029583452036284076\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5788787483702738,\n\ \ \"acc_stderr\": 0.012610325733489903,\n \"acc_norm\": 0.5788787483702738,\n\ \ \"acc_norm_stderr\": 0.012610325733489903\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7279411764705882,\n \"acc_stderr\": 0.027033041151681456,\n\ \ \"acc_norm\": 0.7279411764705882,\n \"acc_norm_stderr\": 0.027033041151681456\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7663398692810458,\n \"acc_stderr\": 0.017119158496044506,\n \ \ \"acc_norm\": 0.7663398692810458,\n \"acc_norm_stderr\": 0.017119158496044506\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n\ \ \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n\ \ \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904035,\n\ \ \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904035\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8706467661691543,\n\ \ \"acc_stderr\": 0.02372983088101853,\n \"acc_norm\": 0.8706467661691543,\n\ \ \"acc_norm_stderr\": 0.02372983088101853\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\ \ \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n\ \ \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8654970760233918,\n \"acc_stderr\": 0.026168221344662297,\n\ \ \"acc_norm\": 0.8654970760233918,\n \"acc_norm_stderr\": 0.026168221344662297\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46878824969400246,\n\ \ \"mc1_stderr\": 0.017469364874577537,\n \"mc2\": 0.6440529032814905,\n\ \ \"mc2_stderr\": 0.014798378745240535\n }\n}\n```" repo_url: https://huggingface.co/AIDC-ai-business/Marcoroni-70B-v1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|arc:challenge|25_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hellaswag|10_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T18-15-51.325850.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-22T18-15-51.325850.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T18-15-51.325850.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-management|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T18-15-51.325850.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_22T18_15_51.325850 path: - '**/details_harness|truthfulqa:mc|0_2023-09-22T18-15-51.325850.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-22T18-15-51.325850.parquet' - config_name: results data_files: - split: 2023_09_22T18_15_51.325850 path: - results_2023-09-22T18-15-51.325850.parquet - split: latest path: - results_2023-09-22T18-15-51.325850.parquet --- # Dataset Card for Evaluation run of AIDC-ai-business/Marcoroni-70B-v1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/AIDC-ai-business/Marcoroni-70B-v1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model 
[AIDC-ai-business/Marcoroni-70B-v1](https://huggingface.co/AIDC-ai-business/Marcoroni-70B-v1)
on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each
configuration, the split being named using the timestamp of the run. The "train" split is always
pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used
to compute and display the aggregated metrics on the
[Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_AIDC-ai-business__Marcoroni-70B-v1",
	"harness_truthfulqa_mc_0",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-22T18:15:51.325850](https://huggingface.co/datasets/open-llm-leaderboard/details_AIDC-ai-business__Marcoroni-70B-v1/blob/main/results_2023-09-22T18-15-51.325850.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7060854021313192, "acc_stderr": 0.030933452595170018, "acc_norm": 0.7100920024195952, "acc_norm_stderr": 0.030899690738616904, "mc1": 0.46878824969400246, "mc1_stderr": 0.017469364874577537, "mc2": 0.6440529032814905, "mc2_stderr": 0.014798378745240535 }, "harness|arc:challenge|25": { "acc": 0.6877133105802048, "acc_stderr": 0.013542598541688067, "acc_norm": 0.735494880546075, "acc_norm_stderr": 0.01288927294931337 }, "harness|hellaswag|10": { "acc": 0.6876120294761999, "acc_stderr": 0.004625198756710245, "acc_norm": 0.8762198765186218, "acc_norm_stderr": 0.0032865748124511866 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8157894736842105, "acc_stderr": 0.031546980450822305, "acc_norm": 0.8157894736842105, "acc_norm_stderr": 0.031546980450822305 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7471698113207547, "acc_stderr": 0.026749899771241214, "acc_norm": 0.7471698113207547, "acc_norm_stderr": 0.026749899771241214 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.03216600808802267, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.03216600808802267 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.04999999999999999, "acc_norm": 0.45, "acc_norm_stderr": 0.04999999999999999 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, 
"acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.7052023121387283, "acc_stderr": 0.03476599607516478, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.03476599607516478 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.04461960433384739, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7106382978723405, "acc_stderr": 0.02964400657700962, "acc_norm": 0.7106382978723405, "acc_norm_stderr": 0.02964400657700962 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46825396825396826, "acc_stderr": 0.0256993528321318, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.0256993528321318 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.49206349206349204, "acc_stderr": 0.044715725362943486, "acc_norm": 0.49206349206349204, "acc_norm_stderr": 0.044715725362943486 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8290322580645161, "acc_stderr": 0.021417242936321586, "acc_norm": 0.8290322580645161, "acc_norm_stderr": 0.021417242936321586 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5566502463054187, "acc_stderr": 0.03495334582162933, "acc_norm": 0.5566502463054187, "acc_norm_stderr": 0.03495334582162933 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.79, "acc_stderr": 0.04093601807403325, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403325 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.028450388805284343, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.028450388805284343 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02239078763821677, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02239078763821677 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9378238341968912, "acc_stderr": 0.017426974154240528, "acc_norm": 0.9378238341968912, "acc_norm_stderr": 0.017426974154240528 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7307692307692307, "acc_stderr": 0.022489389793654817, "acc_norm": 0.7307692307692307, "acc_norm_stderr": 0.022489389793654817 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02831753349606647, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606647 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7857142857142857, "acc_stderr": 0.02665353159671549, "acc_norm": 0.7857142857142857, "acc_norm_stderr": 0.02665353159671549 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4900662251655629, "acc_stderr": 0.04081677107248436, "acc_norm": 0.4900662251655629, "acc_norm_stderr": 0.04081677107248436 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8972477064220183, "acc_stderr": 0.013018246509173768, "acc_norm": 0.8972477064220183, "acc_norm_stderr": 0.013018246509173768 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5925925925925926, "acc_stderr": 
0.033509916046960436, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.033509916046960436 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9215686274509803, "acc_stderr": 0.018869514646658928, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.018869514646658928 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.01999556072375854, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.01999556072375854 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8549618320610687, "acc_stderr": 0.030884661089515368, "acc_norm": 0.8549618320610687, "acc_norm_stderr": 0.030884661089515368 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.030922788320445784, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.030922788320445784 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8343558282208589, "acc_stderr": 0.029208296231259104, "acc_norm": 0.8343558282208589, "acc_norm_stderr": 0.029208296231259104 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5892857142857143, "acc_stderr": 0.04669510663875191, "acc_norm": 0.5892857142857143, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8974358974358975, "acc_stderr": 0.019875655027867454, "acc_norm": 0.8974358974358975, "acc_norm_stderr": 0.019875655027867454 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 
0.04512608598542126, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8684546615581098, "acc_stderr": 0.01208670521425043, "acc_norm": 0.8684546615581098, "acc_norm_stderr": 0.01208670521425043 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7687861271676301, "acc_stderr": 0.022698657167855713, "acc_norm": 0.7687861271676301, "acc_norm_stderr": 0.022698657167855713 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6681564245810055, "acc_stderr": 0.015748421208187303, "acc_norm": 0.6681564245810055, "acc_norm_stderr": 0.015748421208187303 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7684887459807074, "acc_stderr": 0.023956532766639133, "acc_norm": 0.7684887459807074, "acc_norm_stderr": 0.023956532766639133 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8179012345679012, "acc_stderr": 0.021473491834808334, "acc_norm": 0.8179012345679012, "acc_norm_stderr": 0.021473491834808334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5638297872340425, "acc_stderr": 0.029583452036284076, "acc_norm": 0.5638297872340425, "acc_norm_stderr": 0.029583452036284076 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5788787483702738, "acc_stderr": 0.012610325733489903, "acc_norm": 0.5788787483702738, "acc_norm_stderr": 0.012610325733489903 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7279411764705882, "acc_stderr": 0.027033041151681456, "acc_norm": 0.7279411764705882, "acc_norm_stderr": 0.027033041151681456 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7663398692810458, "acc_stderr": 0.017119158496044506, "acc_norm": 0.7663398692810458, "acc_norm_stderr": 0.017119158496044506 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 
0.04220224692971987, "acc_norm": 0.7363636363636363, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7836734693877551, "acc_stderr": 0.026358916334904035, "acc_norm": 0.7836734693877551, "acc_norm_stderr": 0.026358916334904035 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.02372983088101853, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.02372983088101853 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8654970760233918, "acc_stderr": 0.026168221344662297, "acc_norm": 0.8654970760233918, "acc_norm_stderr": 0.026168221344662297 }, "harness|truthfulqa:mc|0": { "mc1": 0.46878824969400246, "mc1_stderr": 0.017469364874577537, "mc2": 0.6440529032814905, "mc2_stderr": 0.014798378745240535 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7333475947380066, -0.8699398040771484, 0.25374171137809753, 0.19917581975460052, -0.13905136287212372, -0.056338485330343246, 0.002352525945752859, -0.2436092346906662, 0.5737481713294983, -0.06633112579584122, -0.47764527797698975, -0.6925200819969177, -0.4173807203769684, 0.2805031239...
null
null
null
null
null
null
null
null
null
null
null
null
null
alzoubi36/title_generation
alzoubi36
2023-10-01T12:43:11Z
131
0
null
[ "region:us" ]
2023-10-01T12:43:11Z
2023-10-01T12:43:03.000Z
2023-10-01T12:43:03
--- dataset_info: features: - name: text dtype: string - name: summary dtype: string - name: id dtype: int64 splits: - name: validation num_bytes: 1753243 num_examples: 2000 - name: test num_bytes: 1682435 num_examples: 2000 - name: train num_bytes: 17556737 num_examples: 20000 download_size: 10393931 dataset_size: 20992415 --- # Dataset Card for "title_generation" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.4907662570476532, -0.20215864479541779, 0.21429850161075592, 0.08907956629991531, -0.2158193290233612, 0.18058207631111145, 0.2527044713497162, -0.0032652418594807386, 0.6091914772987366, 0.5817525386810303, -0.9338443875312805, -0.7411656975746155, -0.5514177680015564, -0.1612248271703...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_Riiid__sheep-duck-llama-2-70b-v1.1
open-llm-leaderboard
2023-10-04T07:22:11Z
131
0
null
[ "region:us" ]
2023-10-04T07:22:11Z
2023-10-04T07:21:12.000Z
2023-10-04T07:21:12
--- pretty_name: Evaluation run of Riiid/sheep-duck-llama-2-70b-v1.1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Riiid/sheep-duck-llama-2-70b-v1.1](https://huggingface.co/Riiid/sheep-duck-llama-2-70b-v1.1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Riiid__sheep-duck-llama-2-70b-v1.1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-10-04T07:20:47.805368](https://huggingface.co/datasets/open-llm-leaderboard/details_Riiid__sheep-duck-llama-2-70b-v1.1/blob/main/results_2023-10-04T07-20-47.805368.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7077701920572519,\n\ \ \"acc_stderr\": 0.030864748969041296,\n \"acc_norm\": 0.7116562654123524,\n\ \ \"acc_norm_stderr\": 0.030832223559852722,\n \"mc1\": 0.4675642594859241,\n\ \ \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6457987192860579,\n\ \ \"mc2_stderr\": 0.014744553500507454\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6877133105802048,\n \"acc_stderr\": 0.013542598541688065,\n\ \ \"acc_norm\": 0.7303754266211604,\n \"acc_norm_stderr\": 0.012968040686869154\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6914957179844653,\n\ \ \"acc_stderr\": 0.004609320024893897,\n \"acc_norm\": 0.8781119298944433,\n\ \ \"acc_norm_stderr\": 0.003264878737586879\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n\ \ \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.6296296296296297,\n\ \ \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8092105263157895,\n \"acc_stderr\": 0.03197565821032499,\n\ \ \"acc_norm\": 0.8092105263157895,\n \"acc_norm_stderr\": 0.03197565821032499\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7509433962264151,\n \"acc_stderr\": 0.026616482980501704,\n\ \ \"acc_norm\": 0.7509433962264151,\n \"acc_norm_stderr\": 0.026616482980501704\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n\ \ \"acc_stderr\": 0.03216600808802267,\n \"acc_norm\": 0.8194444444444444,\n\ \ \"acc_norm_stderr\": 0.03216600808802267\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n\ \ \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n\ \ \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.048108401480826346,\n\ \ \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.048108401480826346\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n\ \ \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6936170212765957,\n \"acc_stderr\": 0.030135906478517563,\n\ \ \"acc_norm\": 0.6936170212765957,\n \"acc_norm_stderr\": 0.030135906478517563\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\ \ \"acc_stderr\": 0.04692008381368909,\n \"acc_norm\": 0.4649122807017544,\n\ \ \"acc_norm_stderr\": 0.04692008381368909\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.03996629574876719,\n\ \ \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.03996629574876719\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"\ acc_norm\": 0.48148148148148145,\n 
\"acc_norm_stderr\": 0.02573364199183898\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172523,\n \"\ acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172523\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.5566502463054187,\n \"acc_stderr\": 0.03495334582162933,\n \"\ acc_norm\": 0.5566502463054187,\n \"acc_norm_stderr\": 0.03495334582162933\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.027998073798781678,\n\ \ \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.027998073798781678\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8939393939393939,\n \"acc_stderr\": 0.02193804773885311,\n \"\ acc_norm\": 0.8939393939393939,\n \"acc_norm_stderr\": 0.02193804773885311\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.01673108529360755,\n\ \ \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.01673108529360755\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7128205128205128,\n \"acc_stderr\": 0.02293992541853062,\n \ \ \"acc_norm\": 0.7128205128205128,\n \"acc_norm_stderr\": 0.02293992541853062\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ 
acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131137,\n \ \ \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131137\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7689075630252101,\n \"acc_stderr\": 0.027381406927868876,\n\ \ \"acc_norm\": 0.7689075630252101,\n \"acc_norm_stderr\": 0.027381406927868876\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.48344370860927155,\n \"acc_stderr\": 0.040802441856289715,\n \"\ acc_norm\": 0.48344370860927155,\n \"acc_norm_stderr\": 0.040802441856289715\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.9009174311926605,\n \"acc_stderr\": 0.01280978008187893,\n \"\ acc_norm\": 0.9009174311926605,\n \"acc_norm_stderr\": 0.01280978008187893\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.6018518518518519,\n \"acc_stderr\": 0.033384734032074016,\n \"\ acc_norm\": 0.6018518518518519,\n \"acc_norm_stderr\": 0.033384734032074016\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.9264705882352942,\n \"acc_stderr\": 0.01831885585008968,\n \"\ acc_norm\": 0.9264705882352942,\n \"acc_norm_stderr\": 0.01831885585008968\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8987341772151899,\n \"acc_stderr\": 0.019637720526065498,\n \ \ \"acc_norm\": 0.8987341772151899,\n \"acc_norm_stderr\": 0.019637720526065498\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7892376681614349,\n\ \ \"acc_stderr\": 0.027373095500540186,\n \"acc_norm\": 0.7892376681614349,\n\ \ \"acc_norm_stderr\": 0.027373095500540186\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8473282442748091,\n \"acc_stderr\": 0.031545216720054725,\n\ \ \"acc_norm\": 0.8473282442748091,\n \"acc_norm_stderr\": 0.031545216720054725\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8677685950413223,\n 
\"acc_stderr\": 0.03092278832044579,\n \"\ acc_norm\": 0.8677685950413223,\n \"acc_norm_stderr\": 0.03092278832044579\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8282208588957055,\n \"acc_stderr\": 0.029634717272371037,\n\ \ \"acc_norm\": 0.8282208588957055,\n \"acc_norm_stderr\": 0.029634717272371037\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5535714285714286,\n\ \ \"acc_stderr\": 0.047184714852195865,\n \"acc_norm\": 0.5535714285714286,\n\ \ \"acc_norm_stderr\": 0.047184714852195865\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.03586594738573974,\n\ \ \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.03586594738573974\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9188034188034188,\n\ \ \"acc_stderr\": 0.017893784904018516,\n \"acc_norm\": 0.9188034188034188,\n\ \ \"acc_norm_stderr\": 0.017893784904018516\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542126,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542126\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n\ \ \"acc_stderr\": 0.012185528166499978,\n \"acc_norm\": 0.8659003831417624,\n\ \ \"acc_norm_stderr\": 0.012185528166499978\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7745664739884393,\n \"acc_stderr\": 0.022497230190967554,\n\ \ \"acc_norm\": 0.7745664739884393,\n \"acc_norm_stderr\": 0.022497230190967554\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6368715083798883,\n\ \ \"acc_stderr\": 0.016083749986853708,\n \"acc_norm\": 0.6368715083798883,\n\ \ \"acc_norm_stderr\": 0.016083749986853708\n },\n 
\"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7516339869281046,\n \"acc_stderr\": 0.02473998135511359,\n\ \ \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.02473998135511359\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.77491961414791,\n\ \ \"acc_stderr\": 0.023720088516179027,\n \"acc_norm\": 0.77491961414791,\n\ \ \"acc_norm_stderr\": 0.023720088516179027\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.020736358408060002,\n\ \ \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.020736358408060002\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5780141843971631,\n \"acc_stderr\": 0.02946218923337059,\n \ \ \"acc_norm\": 0.5780141843971631,\n \"acc_norm_stderr\": 0.02946218923337059\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.589960886571056,\n\ \ \"acc_stderr\": 0.012561837621962028,\n \"acc_norm\": 0.589960886571056,\n\ \ \"acc_norm_stderr\": 0.012561837621962028\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7426470588235294,\n \"acc_stderr\": 0.0265565194700415,\n\ \ \"acc_norm\": 0.7426470588235294,\n \"acc_norm_stderr\": 0.0265565194700415\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7696078431372549,\n \"acc_stderr\": 0.01703522925803403,\n \ \ \"acc_norm\": 0.7696078431372549,\n \"acc_norm_stderr\": 0.01703522925803403\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7454545454545455,\n\ \ \"acc_stderr\": 0.041723430387053825,\n \"acc_norm\": 0.7454545454545455,\n\ \ \"acc_norm_stderr\": 0.041723430387053825\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7959183673469388,\n \"acc_stderr\": 0.025801283475090496,\n\ \ \"acc_norm\": 0.7959183673469388,\n \"acc_norm_stderr\": 0.025801283475090496\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n\ \ \"acc_stderr\": 
0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n\ \ \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n\ \ \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n\ \ \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015575,\n\ \ \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015575\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4675642594859241,\n\ \ \"mc1_stderr\": 0.017466632149577613,\n \"mc2\": 0.6457987192860579,\n\ \ \"mc2_stderr\": 0.014744553500507454\n }\n}\n```" repo_url: https://huggingface.co/Riiid/sheep-duck-llama-2-70b-v1.1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|arc:challenge|25_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hellaswag|10_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T07-20-47.805368.parquet' - 
'**/details_harness|hendrycksTest-public_relations|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-04T07-20-47.805368.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-04T07-20-47.805368.parquet' - 
config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-04T07-20-47.805368.parquet' - config_name: 
harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-management|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - 
split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-04T07-20-47.805368.parquet' - 
config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-04T07-20-47.805368.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_04T07_20_47.805368 path: - '**/details_harness|truthfulqa:mc|0_2023-10-04T07-20-47.805368.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-04T07-20-47.805368.parquet' - config_name: results data_files: - split: 2023_10_04T07_20_47.805368 path: - results_2023-10-04T07-20-47.805368.parquet - split: latest path: - results_2023-10-04T07-20-47.805368.parquet --- # Dataset Card for Evaluation run of Riiid/sheep-duck-llama-2-70b-v1.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Riiid/sheep-duck-llama-2-70b-v1.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [Riiid/sheep-duck-llama-2-70b-v1.1](https://huggingface.co/Riiid/sheep-duck-llama-2-70b-v1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). 
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Riiid__sheep-duck-llama-2-70b-v1.1", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-10-04T07:20:47.805368](https://huggingface.co/datasets/open-llm-leaderboard/details_Riiid__sheep-duck-llama-2-70b-v1.1/blob/main/results_2023-10-04T07-20-47.805368.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7077701920572519, "acc_stderr": 0.030864748969041296, "acc_norm": 0.7116562654123524, "acc_norm_stderr": 0.030832223559852722, "mc1": 0.4675642594859241, "mc1_stderr": 0.017466632149577613, "mc2": 0.6457987192860579, "mc2_stderr": 0.014744553500507454 }, "harness|arc:challenge|25": { "acc": 0.6877133105802048, "acc_stderr": 0.013542598541688065, "acc_norm": 0.7303754266211604, "acc_norm_stderr": 0.012968040686869154 }, "harness|hellaswag|10": { "acc": 0.6914957179844653, "acc_stderr": 0.004609320024893897, "acc_norm": 0.8781119298944433, "acc_norm_stderr": 0.003264878737586879 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04171654161354543, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8092105263157895, "acc_stderr": 0.03197565821032499, "acc_norm": 0.8092105263157895, "acc_norm_stderr": 0.03197565821032499 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7509433962264151, "acc_stderr": 0.026616482980501704, "acc_norm": 0.7509433962264151, "acc_norm_stderr": 0.026616482980501704 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.03216600808802267, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.03216600808802267 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, 
"acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.048108401480826346, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.048108401480826346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6936170212765957, "acc_stderr": 0.030135906478517563, "acc_norm": 0.6936170212765957, "acc_norm_stderr": 0.030135906478517563 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.04692008381368909, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.04692008381368909 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.03996629574876719, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.03996629574876719 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172523, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172523 }, "harness|hendrycksTest-high_school_chemistry|5": 
{ "acc": 0.5566502463054187, "acc_stderr": 0.03495334582162933, "acc_norm": 0.5566502463054187, "acc_norm_stderr": 0.03495334582162933 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8484848484848485, "acc_stderr": 0.027998073798781678, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.027998073798781678 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8939393939393939, "acc_stderr": 0.02193804773885311, "acc_norm": 0.8939393939393939, "acc_norm_stderr": 0.02193804773885311 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.01673108529360755, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.01673108529360755 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7128205128205128, "acc_stderr": 0.02293992541853062, "acc_norm": 0.7128205128205128, "acc_norm_stderr": 0.02293992541853062 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.028897748741131137, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.028897748741131137 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7689075630252101, "acc_stderr": 0.027381406927868876, "acc_norm": 0.7689075630252101, "acc_norm_stderr": 0.027381406927868876 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.48344370860927155, "acc_stderr": 0.040802441856289715, "acc_norm": 0.48344370860927155, "acc_norm_stderr": 0.040802441856289715 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9009174311926605, "acc_stderr": 0.01280978008187893, "acc_norm": 0.9009174311926605, "acc_norm_stderr": 0.01280978008187893 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6018518518518519, "acc_stderr": 0.033384734032074016, "acc_norm": 0.6018518518518519, 
"acc_norm_stderr": 0.033384734032074016 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9264705882352942, "acc_stderr": 0.01831885585008968, "acc_norm": 0.9264705882352942, "acc_norm_stderr": 0.01831885585008968 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8987341772151899, "acc_stderr": 0.019637720526065498, "acc_norm": 0.8987341772151899, "acc_norm_stderr": 0.019637720526065498 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7892376681614349, "acc_stderr": 0.027373095500540186, "acc_norm": 0.7892376681614349, "acc_norm_stderr": 0.027373095500540186 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8473282442748091, "acc_stderr": 0.031545216720054725, "acc_norm": 0.8473282442748091, "acc_norm_stderr": 0.031545216720054725 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8677685950413223, "acc_stderr": 0.03092278832044579, "acc_norm": 0.8677685950413223, "acc_norm_stderr": 0.03092278832044579 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8282208588957055, "acc_stderr": 0.029634717272371037, "acc_norm": 0.8282208588957055, "acc_norm_stderr": 0.029634717272371037 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5535714285714286, "acc_stderr": 0.047184714852195865, "acc_norm": 0.5535714285714286, "acc_norm_stderr": 0.047184714852195865 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.03586594738573974, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.03586594738573974 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9188034188034188, "acc_stderr": 0.017893784904018516, "acc_norm": 0.9188034188034188, "acc_norm_stderr": 0.017893784904018516 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542126, "acc_norm": 0.72, 
"acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499978, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499978 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7745664739884393, "acc_stderr": 0.022497230190967554, "acc_norm": 0.7745664739884393, "acc_norm_stderr": 0.022497230190967554 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6368715083798883, "acc_stderr": 0.016083749986853708, "acc_norm": 0.6368715083798883, "acc_norm_stderr": 0.016083749986853708 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7516339869281046, "acc_stderr": 0.02473998135511359, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.02473998135511359 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.77491961414791, "acc_stderr": 0.023720088516179027, "acc_norm": 0.77491961414791, "acc_norm_stderr": 0.023720088516179027 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8333333333333334, "acc_stderr": 0.020736358408060002, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.020736358408060002 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5780141843971631, "acc_stderr": 0.02946218923337059, "acc_norm": 0.5780141843971631, "acc_norm_stderr": 0.02946218923337059 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.589960886571056, "acc_stderr": 0.012561837621962028, "acc_norm": 0.589960886571056, "acc_norm_stderr": 0.012561837621962028 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7426470588235294, "acc_stderr": 0.0265565194700415, "acc_norm": 0.7426470588235294, "acc_norm_stderr": 0.0265565194700415 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7696078431372549, "acc_stderr": 0.01703522925803403, "acc_norm": 0.7696078431372549, "acc_norm_stderr": 0.01703522925803403 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 0.041723430387053825, "acc_norm": 
0.7454545454545455, "acc_norm_stderr": 0.041723430387053825 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7959183673469388, "acc_stderr": 0.025801283475090496, "acc_norm": 0.7959183673469388, "acc_norm_stderr": 0.025801283475090496 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015575, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015575 }, "harness|truthfulqa:mc|0": { "mc1": 0.4675642594859241, "mc1_stderr": 0.017466632149577613, "mc2": 0.6457987192860579, "mc2_stderr": 0.014744553500507454 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7379705309867859, -0.854792058467865, 0.23793216049671173, 0.2642776072025299, -0.15890051424503326, -0.03532427176833153, 0.018922943621873856, -0.23592889308929443, 0.5752972364425659, -0.03283199295401573, -0.4990417957305908, -0.6783965229988098, -0.45424309372901917, 0.258032202720...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_sequelbox__StellarBright
open-llm-leaderboard
2023-10-11T03:36:24Z
131
0
null
[ "region:us" ]
2023-10-11T03:36:24Z
2023-10-11T03:35:24.000Z
2023-10-11T03:35:24
--- pretty_name: Evaluation run of sequelbox/StellarBright dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [sequelbox/StellarBright](https://huggingface.co/sequelbox/StellarBright) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_sequelbox__StellarBright\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-10-11T03:35:00.957425](https://huggingface.co/datasets/open-llm-leaderboard/details_sequelbox__StellarBright/blob/main/results_2023-10-11T03-35-00.957425.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7109524643752221,\n\ \ \"acc_stderr\": 0.030739601585983465,\n \"acc_norm\": 0.7148315560048047,\n\ \ \"acc_norm_stderr\": 0.030707363721296215,\n \"mc1\": 0.46511627906976744,\n\ \ \"mc1_stderr\": 0.017460849975873965,\n \"mc2\": 0.6446460697306154,\n\ \ \"mc2_stderr\": 0.014753033588623255\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6885665529010239,\n \"acc_stderr\": 0.013532472099850945,\n\ \ \"acc_norm\": 0.7295221843003413,\n \"acc_norm_stderr\": 0.012980954547659556\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.690300736904999,\n\ \ \"acc_stderr\": 0.004614246282055375,\n \"acc_norm\": 0.8782115116510655,\n\ \ \"acc_norm_stderr\": 0.0032637298176987762\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n\ \ \"acc_stderr\": 0.041539484047424,\n \"acc_norm\": 0.6370370370370371,\n\ \ \"acc_norm_stderr\": 0.041539484047424\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8092105263157895,\n \"acc_stderr\": 0.03197565821032499,\n\ \ \"acc_norm\": 0.8092105263157895,\n \"acc_norm_stderr\": 0.03197565821032499\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n\ \ \"acc_stderr\": 0.041633319989322605,\n \"acc_norm\": 0.78,\n \ \ \"acc_norm_stderr\": 0.041633319989322605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7433962264150943,\n \"acc_stderr\": 0.026880647889051985,\n\ \ \"acc_norm\": 0.7433962264150943,\n \"acc_norm_stderr\": 0.026880647889051985\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8263888888888888,\n\ \ \"acc_stderr\": 0.03167473383795717,\n \"acc_norm\": 0.8263888888888888,\n\ \ \"acc_norm_stderr\": 0.03167473383795717\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \ \ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n\ \ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6936416184971098,\n\ \ \"acc_stderr\": 0.03514942551267439,\n \"acc_norm\": 0.6936416184971098,\n\ \ \"acc_norm_stderr\": 0.03514942551267439\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n\ \ \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.7106382978723405,\n \"acc_stderr\": 0.02964400657700962,\n\ \ \"acc_norm\": 0.7106382978723405,\n \"acc_norm_stderr\": 0.02964400657700962\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.47368421052631576,\n\ \ \"acc_stderr\": 0.04697085136647863,\n \"acc_norm\": 0.47368421052631576,\n\ \ \"acc_norm_stderr\": 0.04697085136647863\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.039966295748767186,\n\ \ \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.039966295748767186\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130723,\n \"\ acc_norm\": 0.47883597883597884,\n 
\"acc_norm_stderr\": 0.025728230952130723\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5158730158730159,\n\ \ \"acc_stderr\": 0.044698818540726076,\n \"acc_norm\": 0.5158730158730159,\n\ \ \"acc_norm_stderr\": 0.044698818540726076\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \ \ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8161290322580645,\n\ \ \"acc_stderr\": 0.022037217340267826,\n \"acc_norm\": 0.8161290322580645,\n\ \ \"acc_norm_stderr\": 0.022037217340267826\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.03499113137676744,\n\ \ \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.03499113137676744\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8484848484848485,\n \"acc_stderr\": 0.027998073798781678,\n\ \ \"acc_norm\": 0.8484848484848485,\n \"acc_norm_stderr\": 0.027998073798781678\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8888888888888888,\n \"acc_stderr\": 0.022390787638216763,\n \"\ acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.022390787638216763\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9430051813471503,\n \"acc_stderr\": 0.01673108529360755,\n\ \ \"acc_norm\": 0.9430051813471503,\n \"acc_norm_stderr\": 0.01673108529360755\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7102564102564103,\n \"acc_stderr\": 0.023000628243687968,\n\ \ \"acc_norm\": 0.7102564102564103,\n \"acc_norm_stderr\": 0.023000628243687968\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \ \ \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7773109243697479,\n \"acc_stderr\": 0.027025433498882392,\n\ \ \"acc_norm\": 0.7773109243697479,\n \"acc_norm_stderr\": 0.027025433498882392\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.4900662251655629,\n \"acc_stderr\": 0.04081677107248436,\n \"\ acc_norm\": 0.4900662251655629,\n \"acc_norm_stderr\": 0.04081677107248436\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.9119266055045872,\n \"acc_stderr\": 0.01215074371948166,\n \"\ acc_norm\": 0.9119266055045872,\n \"acc_norm_stderr\": 0.01215074371948166\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.625,\n \"acc_stderr\": 0.033016908987210894,\n \"acc_norm\": 0.625,\n\ \ \"acc_norm_stderr\": 0.033016908987210894\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\ : {\n \"acc\": 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658925,\n\ \ \"acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658925\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8945147679324894,\n \"acc_stderr\": 0.01999556072375854,\n \ \ \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.01999556072375854\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7847533632286996,\n\ \ \"acc_stderr\": 0.027584066602208274,\n \"acc_norm\": 0.7847533632286996,\n\ \ \"acc_norm_stderr\": 0.027584066602208274\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8473282442748091,\n \"acc_stderr\": 0.031545216720054725,\n\ \ \"acc_norm\": 0.8473282442748091,\n \"acc_norm_stderr\": 0.031545216720054725\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 
0.8760330578512396,\n \"acc_stderr\": 0.030083098716035206,\n \"\ acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035206\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8220858895705522,\n \"acc_stderr\": 0.03004735765580662,\n\ \ \"acc_norm\": 0.8220858895705522,\n \"acc_norm_stderr\": 0.03004735765580662\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5714285714285714,\n\ \ \"acc_stderr\": 0.04697113923010213,\n \"acc_norm\": 0.5714285714285714,\n\ \ \"acc_norm_stderr\": 0.04697113923010213\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.034926064766237906,\n\ \ \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.034926064766237906\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9145299145299145,\n\ \ \"acc_stderr\": 0.018315891685625845,\n \"acc_norm\": 0.9145299145299145,\n\ \ \"acc_norm_stderr\": 0.018315891685625845\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542126,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542126\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8710089399744572,\n\ \ \"acc_stderr\": 0.011986371548086867,\n \"acc_norm\": 0.8710089399744572,\n\ \ \"acc_norm_stderr\": 0.011986371548086867\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7832369942196532,\n \"acc_stderr\": 0.022183477668412856,\n\ \ \"acc_norm\": 0.7832369942196532,\n \"acc_norm_stderr\": 0.022183477668412856\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6245810055865921,\n\ \ \"acc_stderr\": 0.01619510424846353,\n \"acc_norm\": 0.6245810055865921,\n\ \ \"acc_norm_stderr\": 
0.01619510424846353\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n\ \ \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7877813504823151,\n\ \ \"acc_stderr\": 0.023222756797435115,\n \"acc_norm\": 0.7877813504823151,\n\ \ \"acc_norm_stderr\": 0.023222756797435115\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.020736358408060002,\n\ \ \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.020736358408060002\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5957446808510638,\n \"acc_stderr\": 0.02927553215970472,\n \ \ \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.02927553215970472\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5827900912646675,\n\ \ \"acc_stderr\": 0.012593959992906427,\n \"acc_norm\": 0.5827900912646675,\n\ \ \"acc_norm_stderr\": 0.012593959992906427\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.75,\n \"acc_stderr\": 0.026303648393696036,\n \ \ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.026303648393696036\n \ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\"\ : 0.7728758169934641,\n \"acc_stderr\": 0.016949853279212373,\n \"\ acc_norm\": 0.7728758169934641,\n \"acc_norm_stderr\": 0.016949853279212373\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7454545454545455,\n\ \ \"acc_stderr\": 0.041723430387053825,\n \"acc_norm\": 0.7454545454545455,\n\ \ \"acc_norm_stderr\": 0.041723430387053825\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7918367346938775,\n \"acc_stderr\": 0.0259911176728133,\n\ \ \"acc_norm\": 0.7918367346938775,\n \"acc_norm_stderr\": 0.0259911176728133\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8805970149253731,\n\ \ 
\"acc_stderr\": 0.02292879327721974,\n \"acc_norm\": 0.8805970149253731,\n\ \ \"acc_norm_stderr\": 0.02292879327721974\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \ \ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n\ \ \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n\ \ \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8771929824561403,\n \"acc_stderr\": 0.02517298435015575,\n\ \ \"acc_norm\": 0.8771929824561403,\n \"acc_norm_stderr\": 0.02517298435015575\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46511627906976744,\n\ \ \"mc1_stderr\": 0.017460849975873965,\n \"mc2\": 0.6446460697306154,\n\ \ \"mc2_stderr\": 0.014753033588623255\n }\n}\n```" repo_url: https://huggingface.co/sequelbox/StellarBright leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|arc:challenge|25_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hellaswag|10_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-management|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T03-35-00.957425.parquet' - 
'**/details_harness|hendrycksTest-public_relations|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-10-11T03-35-00.957425.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T03-35-00.957425.parquet' - 
config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T03-35-00.957425.parquet' - config_name: 
harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-international_law|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-management|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - 
split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-10-11T03-35-00.957425.parquet' - 
config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-virology|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T03-35-00.957425.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_10_11T03_35_00.957425 path: - '**/details_harness|truthfulqa:mc|0_2023-10-11T03-35-00.957425.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-10-11T03-35-00.957425.parquet' - config_name: results data_files: - split: 2023_10_11T03_35_00.957425 path: - results_2023-10-11T03-35-00.957425.parquet - split: latest path: - results_2023-10-11T03-35-00.957425.parquet --- # Dataset Card for Evaluation run of sequelbox/StellarBright ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/sequelbox/StellarBright - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [sequelbox/StellarBright](https://huggingface.co/sequelbox/StellarBright) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). 
The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_sequelbox__StellarBright", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-10-11T03:35:00.957425](https://huggingface.co/datasets/open-llm-leaderboard/details_sequelbox__StellarBright/blob/main/results_2023-10-11T03-35-00.957425.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7109524643752221, "acc_stderr": 0.030739601585983465, "acc_norm": 0.7148315560048047, "acc_norm_stderr": 0.030707363721296215, "mc1": 0.46511627906976744, "mc1_stderr": 0.017460849975873965, "mc2": 0.6446460697306154, "mc2_stderr": 0.014753033588623255 }, "harness|arc:challenge|25": { "acc": 0.6885665529010239, "acc_stderr": 0.013532472099850945, "acc_norm": 0.7295221843003413, "acc_norm_stderr": 0.012980954547659556 }, "harness|hellaswag|10": { "acc": 0.690300736904999, "acc_stderr": 0.004614246282055375, "acc_norm": 0.8782115116510655, "acc_norm_stderr": 0.0032637298176987762 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.041539484047424, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.041539484047424 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8092105263157895, "acc_stderr": 0.03197565821032499, "acc_norm": 0.8092105263157895, "acc_norm_stderr": 0.03197565821032499 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.78, "acc_stderr": 0.041633319989322605, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322605 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7433962264150943, "acc_stderr": 0.026880647889051985, "acc_norm": 0.7433962264150943, "acc_norm_stderr": 0.026880647889051985 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8263888888888888, "acc_stderr": 0.03167473383795717, "acc_norm": 0.8263888888888888, "acc_norm_stderr": 0.03167473383795717 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 
0.04943110704237102 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6936416184971098, "acc_stderr": 0.03514942551267439, "acc_norm": 0.6936416184971098, "acc_norm_stderr": 0.03514942551267439 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7106382978723405, "acc_stderr": 0.02964400657700962, "acc_norm": 0.7106382978723405, "acc_norm_stderr": 0.02964400657700962 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.47368421052631576, "acc_stderr": 0.04697085136647863, "acc_norm": 0.47368421052631576, "acc_norm_stderr": 0.04697085136647863 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.039966295748767186, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.039966295748767186 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130723, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130723 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5158730158730159, "acc_stderr": 0.044698818540726076, "acc_norm": 0.5158730158730159, "acc_norm_stderr": 0.044698818540726076 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8161290322580645, "acc_stderr": 0.022037217340267826, "acc_norm": 0.8161290322580645, "acc_norm_stderr": 0.022037217340267826 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5517241379310345, "acc_stderr": 0.03499113137676744, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8484848484848485, "acc_stderr": 0.027998073798781678, "acc_norm": 0.8484848484848485, "acc_norm_stderr": 0.027998073798781678 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8888888888888888, "acc_stderr": 0.022390787638216763, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.022390787638216763 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9430051813471503, "acc_stderr": 0.01673108529360755, "acc_norm": 0.9430051813471503, "acc_norm_stderr": 0.01673108529360755 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7102564102564103, "acc_stderr": 0.023000628243687968, "acc_norm": 0.7102564102564103, "acc_norm_stderr": 0.023000628243687968 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7773109243697479, "acc_stderr": 0.027025433498882392, "acc_norm": 0.7773109243697479, "acc_norm_stderr": 0.027025433498882392 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4900662251655629, "acc_stderr": 0.04081677107248436, "acc_norm": 0.4900662251655629, "acc_norm_stderr": 0.04081677107248436 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9119266055045872, "acc_stderr": 0.01215074371948166, "acc_norm": 0.9119266055045872, "acc_norm_stderr": 0.01215074371948166 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.625, "acc_stderr": 0.033016908987210894, 
"acc_norm": 0.625, "acc_norm_stderr": 0.033016908987210894 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9215686274509803, "acc_stderr": 0.018869514646658925, "acc_norm": 0.9215686274509803, "acc_norm_stderr": 0.018869514646658925 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8945147679324894, "acc_stderr": 0.01999556072375854, "acc_norm": 0.8945147679324894, "acc_norm_stderr": 0.01999556072375854 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7847533632286996, "acc_stderr": 0.027584066602208274, "acc_norm": 0.7847533632286996, "acc_norm_stderr": 0.027584066602208274 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8473282442748091, "acc_stderr": 0.031545216720054725, "acc_norm": 0.8473282442748091, "acc_norm_stderr": 0.031545216720054725 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035206, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035206 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8220858895705522, "acc_stderr": 0.03004735765580662, "acc_norm": 0.8220858895705522, "acc_norm_stderr": 0.03004735765580662 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5714285714285714, "acc_stderr": 0.04697113923010213, "acc_norm": 0.5714285714285714, "acc_norm_stderr": 0.04697113923010213 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.034926064766237906, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.034926064766237906 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9145299145299145, "acc_stderr": 0.018315891685625845, "acc_norm": 0.9145299145299145, "acc_norm_stderr": 0.018315891685625845 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542126, "acc_norm": 
0.72, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8710089399744572, "acc_stderr": 0.011986371548086867, "acc_norm": 0.8710089399744572, "acc_norm_stderr": 0.011986371548086867 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7832369942196532, "acc_stderr": 0.022183477668412856, "acc_norm": 0.7832369942196532, "acc_norm_stderr": 0.022183477668412856 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6245810055865921, "acc_stderr": 0.01619510424846353, "acc_norm": 0.6245810055865921, "acc_norm_stderr": 0.01619510424846353 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7877813504823151, "acc_stderr": 0.023222756797435115, "acc_norm": 0.7877813504823151, "acc_norm_stderr": 0.023222756797435115 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8333333333333334, "acc_stderr": 0.020736358408060002, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.020736358408060002 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5957446808510638, "acc_stderr": 0.02927553215970472, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.02927553215970472 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5827900912646675, "acc_stderr": 0.012593959992906427, "acc_norm": 0.5827900912646675, "acc_norm_stderr": 0.012593959992906427 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.75, "acc_stderr": 0.026303648393696036, "acc_norm": 0.75, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7728758169934641, "acc_stderr": 0.016949853279212373, "acc_norm": 0.7728758169934641, "acc_norm_stderr": 0.016949853279212373 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 0.041723430387053825, "acc_norm": 0.7454545454545455, 
"acc_norm_stderr": 0.041723430387053825 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7918367346938775, "acc_stderr": 0.0259911176728133, "acc_norm": 0.7918367346938775, "acc_norm_stderr": 0.0259911176728133 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8805970149253731, "acc_stderr": 0.02292879327721974, "acc_norm": 0.8805970149253731, "acc_norm_stderr": 0.02292879327721974 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5301204819277109, "acc_stderr": 0.03885425420866767, "acc_norm": 0.5301204819277109, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8771929824561403, "acc_stderr": 0.02517298435015575, "acc_norm": 0.8771929824561403, "acc_norm_stderr": 0.02517298435015575 }, "harness|truthfulqa:mc|0": { "mc1": 0.46511627906976744, "mc1_stderr": 0.017460849975873965, "mc2": 0.6446460697306154, "mc2_stderr": 0.014753033588623255 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7013170123100281, -0.8587830066680908, 0.313723087310791, 0.17472216486930847, -0.12332380563020706, -0.008483264595270157, 0.09325172007083893, -0.20048223435878754, 0.5492393374443054, -0.031502287834882736, -0.5350324511528015, -0.7173213362693787, -0.4683588743209839, 0.214188680052...
null
null
null
null
null
null
null
null
null
null
null
null
null
nova-sqoin/hotel_dataset_llama2
nova-sqoin
2023-11-22T09:27:23Z
131
0
null
[ "size_categories:n<1K", "language:pt", "language:en", "license:apache-2.0", "region:us" ]
2023-11-22T09:27:23Z
2023-11-07T09:14:06.000Z
2023-11-07T09:14:06
--- license: apache-2.0 language: - pt - en size_categories: - n<1K ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
aakanksha19/cdr_bigbio_processed
aakanksha19
2023-11-21T05:27:54Z
131
0
null
[ "license:other", "region:us" ]
2023-11-21T05:27:54Z
2023-11-21T05:24:04.000Z
2023-11-21T05:24:04
--- license: other license_name: public-domain-mark-1.0 license_link: https://creativecommons.org/publicdomain/mark/1.0/deed.en ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-250000-300000
tomekkorbak
2022-10-06T03:07:35Z
130
0
null
[ "region:us" ]
2022-10-06T03:07:35Z
2022-10-03T19:52:11.000Z
2022-10-03T19:52:11
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622264862060547, 0.43461528420448303, -0.52829909324646, 0.7012971639633179, 0.7915720343589783, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104477167129517, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
Anthropic/llm_global_opinions
Anthropic
2023-06-29T00:46:48Z
130
24
null
[ "size_categories:1K<n<10K", "language:en", "license:cc-by-nc-sa-4.0", "arxiv:2306.16388", "region:us" ]
2023-06-29T00:46:48Z
2023-06-26T07:47:41.000Z
2023-06-26T07:47:41
--- license: cc-by-nc-sa-4.0 language: - en size_categories: - 1K<n<10K --- # Dataset Card for GlobalOpinionQA ## Dataset Summary The data contains a subset of survey questions about global issues and opinions adapted from the [World Values Survey](https://www.worldvaluessurvey.org/) and [Pew Global Attitudes Survey](https://www.pewresearch.org/). The data is further described in the paper: [Towards Measuring the Representation of Subjective Global Opinions in Language Models](https://arxiv.org/abs/2306.16388). ## Purpose In our paper, we use this dataset to analyze the opinions that large language models (LLMs) reflect on complex global issues. Our goal is to gain insights into potential biases in AI systems by evaluating their performance on subjective topics. ## Data Format The data is in a CSV file with the following columns: - question: The text of the survey question. - selections: A dictionary where the key is the country name and the value is a list of percentages of respondents who selected each answer option for that country. - options: A list of the answer options for the given question. - source: GAS/WVS depending on whether the question is coming from Global Attitudes Survey or World Value Survey. ## Usage ```python from datasets import load_dataset # Loading the data dataset = load_dataset("Anthropic/llm_global_opinions") ``` ## Disclaimer We recognize the limitations in using this dataset to evaluate LLMs, as they were not specifically designed for this purpose. Therefore, we acknowledge that the construct validity of these datasets when applied to LLMs may be limited. ## Contact For questions, you can email esin at anthropic dot com ## Citation If you would like to cite our work or data, you may use the following bibtex citation: ``` @misc{durmus2023measuring, title={Towards Measuring the Representation of Subjective Global Opinions in Language Models}, author={Esin Durmus and Karina Nyugen and Thomas I. 
Liao and Nicholas Schiefer and Amanda Askell and Anton Bakhtin and Carol Chen and Zac Hatfield-Dodds and Danny Hernandez and Nicholas Joseph and Liane Lovitt and Sam McCandlish and Orowa Sikder and Alex Tamkin and Janel Thamkul and Jared Kaplan and Jack Clark and Deep Ganguli}, year={2023}, eprint={2306.16388}, archivePrefix={arXiv}, primaryClass={cs.CL} } ```
[ -0.46284863352775574, -0.5425790548324585, 0.38582906126976013, 0.022902963683009148, -0.16085295379161835, -0.17621175944805145, -0.4157320261001587, -0.4098604619503021, 0.16575250029563904, 0.3419359028339386, -0.36724746227264404, -0.7026264667510986, -0.4760793447494507, 0.10072246193...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_julianweng__Llama-2-7b-chat-orcah
open-llm-leaderboard
2023-09-17T17:33:16Z
130
0
null
[ "region:us" ]
2023-09-17T17:33:16Z
2023-08-18T11:19:00.000Z
2023-08-18T11:19:00
--- pretty_name: Evaluation run of julianweng/Llama-2-7b-chat-orcah dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [julianweng/Llama-2-7b-chat-orcah](https://huggingface.co/julianweng/Llama-2-7b-chat-orcah)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_julianweng__Llama-2-7b-chat-orcah\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-09-17T17:33:03.536328](https://huggingface.co/datasets/open-llm-leaderboard/details_julianweng__Llama-2-7b-chat-orcah/blob/main/results_2023-09-17T17-33-03.536328.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.02936241610738255,\n\ \ \"em_stderr\": 0.0017288770032803159,\n \"f1\": 0.07552432885906037,\n\ \ \"f1_stderr\": 0.0020587215501161925,\n \"acc\": 0.3737288120380116,\n\ \ \"acc_stderr\": 0.00900957367793152\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.02936241610738255,\n \"em_stderr\": 0.0017288770032803159,\n\ \ \"f1\": 0.07552432885906037,\n \"f1_stderr\": 0.0020587215501161925\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03790750568612585,\n \ \ \"acc_stderr\": 0.005260333907798431\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7095501183898973,\n \"acc_stderr\": 0.01275881344806461\n\ \ }\n}\n```" repo_url: https://huggingface.co/julianweng/Llama-2-7b-chat-orcah leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|arc:challenge|25_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-07-24T11:44:40.236710.parquet' - config_name: harness_drop_3 data_files: - split: 2023_09_17T17_33_03.536328 path: - '**/details_harness|drop|3_2023-09-17T17-33-03.536328.parquet' - split: latest path: - '**/details_harness|drop|3_2023-09-17T17-33-03.536328.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_09_17T17_33_03.536328 path: - '**/details_harness|gsm8k|5_2023-09-17T17-33-03.536328.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-09-17T17-33-03.536328.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hellaswag|10_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:44:40.236710.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:44:40.236710.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:44:40.236710.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:44:40.236710.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:44:40.236710.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:44:40.236710.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:44:40.236710.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-07-24T11:44:40.236710.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_07_24T11_44_40.236710 path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:44:40.236710.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-07-24T11:44:40.236710.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_09_17T17_33_03.536328 path: - '**/details_harness|winogrande|5_2023-09-17T17-33-03.536328.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-09-17T17-33-03.536328.parquet' - config_name: results data_files: - split: 2023_07_24T11_44_40.236710 path: - results_2023-07-24T11:44:40.236710.parquet - split: 2023_09_17T17_33_03.536328 path: - results_2023-09-17T17-33-03.536328.parquet - split: latest path: - results_2023-09-17T17-33-03.536328.parquet --- # Dataset Card for Evaluation run of julianweng/Llama-2-7b-chat-orcah ## Dataset Description - 
**Homepage:** - **Repository:** https://huggingface.co/julianweng/Llama-2-7b-chat-orcah - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [julianweng/Llama-2-7b-chat-orcah](https://huggingface.co/julianweng/Llama-2-7b-chat-orcah) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_julianweng__Llama-2-7b-chat-orcah", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T17:33:03.536328](https://huggingface.co/datasets/open-llm-leaderboard/details_julianweng__Llama-2-7b-chat-orcah/blob/main/results_2023-09-17T17-33-03.536328.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.02936241610738255, "em_stderr": 0.0017288770032803159, "f1": 0.07552432885906037, "f1_stderr": 0.0020587215501161925, "acc": 0.3737288120380116, "acc_stderr": 0.00900957367793152 }, "harness|drop|3": { "em": 0.02936241610738255, "em_stderr": 0.0017288770032803159, "f1": 0.07552432885906037, "f1_stderr": 0.0020587215501161925 }, "harness|gsm8k|5": { "acc": 0.03790750568612585, "acc_stderr": 0.005260333907798431 }, "harness|winogrande|5": { "acc": 0.7095501183898973, "acc_stderr": 0.01275881344806461 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.3378402590751648, -0.7598088979721069, 0.16672195494174957, 0.26449844241142273, -0.19231681525707245, 0.13816803693771362, -0.41102534532546997, -0.3233392536640167, 0.47338205575942993, 0.595970094203949, -0.7423812747001648, -1.0439727306365967, -0.6954483389854431, 0.155515864491462...
null
null
null
null
null
null
null
null
null
null
null
null
null
vlsp-2023-vllm/en-to-vi-formal-informal-tranlations
vlsp-2023-vllm
2023-10-30T02:59:09Z
130
0
null
[ "region:us" ]
2023-10-30T02:59:09Z
2023-09-12T04:17:33.000Z
2023-09-12T04:17:33
--- dataset_info: features: - name: en dtype: string - name: vi dtype: string - name: fewshot_samples list: - name: en dtype: string - name: vi dtype: string splits: - name: val num_bytes: 178154 num_examples: 160 - name: test num_bytes: 175339 num_examples: 160 download_size: 124988 dataset_size: 353493 --- # Few-shot Translation ## Install To install `lm-eval` from the github repository main branch, run: ```bash git clone https://github.com/hieunguyen1053/lm-evaluation-harness cd lm-evaluation-harness pip install -e . ``` ## Basic Usage > **Note**: When reporting results from eval harness, please include the task versions (shown in `results["versions"]`) for reproducibility. This allows bug fixes to tasks while also ensuring that previously reported scores are reproducible. See the [Task Versioning](#task-versioning) section for more info. ### Hugging Face `transformers` To evaluate a model hosted on the [HuggingFace Hub](https://huggingface.co/models) (e.g. vlsp-2023-vllm/hoa-1b4) on `hellaswag_vi` you can use the following command: ```bash python main.py \ --model hf-causal \ --model_args pretrained=vlsp-2023-vllm/hoa-1b4 \ --tasks translation_vi \ --batch_size auto \ --device cuda:0 ``` Additional arguments can be provided to the model constructor using the `--model_args` flag. Most notably, this supports the common practice of using the `revisions` feature on the Hub to store partially trained checkpoints, or to specify the datatype for running a model: ```bash python main.py \ --model hf-causal \ --model_args pretrained=vlsp-2023-vllm/hoa-1b4,revision=step100000,dtype="float" \ --tasks translation_vi \ --device cuda:0 ``` To evaluate models that are loaded via `AutoSeq2SeqLM` in Huggingface, you instead use `hf-seq2seq`. *To evaluate (causal) models across multiple GPUs, use `--model hf-causal-experimental`* > **Warning**: Choosing the wrong model may result in erroneous outputs despite not erroring.
[ -0.4534435570240021, -0.8438851833343506, 0.6965327262878418, 0.44495174288749695, -0.1461096554994583, -0.24055390059947968, -0.04122724384069443, -0.16196000576019287, 0.15076468884944916, 0.3242693841457367, -0.7110030651092529, -0.34817126393318176, -0.6033231019973755, 0.0886553749442...
null
null
null
null
null
null
null
null
null
null
null
null
null
nova-sqoin/hotel_dataset
nova-sqoin
2023-11-08T10:44:54Z
130
0
null
[ "size_categories:n<1K", "language:pt", "language:en", "license:apache-2.0", "region:us" ]
2023-11-08T10:44:54Z
2023-11-08T10:43:43.000Z
2023-11-08T10:43:43
--- license: apache-2.0 language: - pt - en size_categories: - n<1K ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
AhmedSSoliman/QRCD
AhmedSSoliman
2022-03-06T18:58:06Z
129
0
null
[ "region:us" ]
2022-03-06T18:58:06Z
2022-03-05T20:46:25.000Z
2022-03-05T20:46:25
This dataset is presented for the task of Answering Questions on the Holy Qur'an. https://sites.google.com/view/quran-qa-2022 QRCD (Qur'anic Reading Comprehension Dataset) is composed of 1,093 tuples of question-passage pairs that are coupled with their extracted answers to constitute 1,337 question-passage-answer triplets. It is split into training (65%), development (10%), and test (25%) sets. QRCD is a JSON Lines (JSONL) file; each line is a JSON object that comprises a question-passage pair, along with its answers extracted from the accompanying passage. The dataset adopts the format shown below. The sample below has two JSON objects, one for each of the above two questions.
[ -0.4440065324306488, -0.7845544815063477, 0.2065732777118683, 0.0228401031345129, -0.5299262404441833, 0.2962125241756439, 0.21928885579109192, -0.0542166531085968, -0.22743269801139832, 0.9147359728813171, -0.6380854249000549, -0.42643824219703674, -0.17722296714782715, 0.2812179923057556...
null
null
null
null
null
null
null
null
null
null
null
null
null
ChristophSchuhmann/improved_aesthetics_5plus
ChristophSchuhmann
2022-08-11T12:46:57Z
129
15
null
[ "license:apache-2.0", "region:us" ]
2022-08-11T12:46:57Z
2022-08-11T12:06:13.000Z
2022-08-11T12:06:13
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
tomekkorbak/detoxify-pile-chunk3-300000-350000
tomekkorbak
2022-10-03T18:38:38Z
129
0
null
[ "region:us" ]
2022-10-03T18:38:38Z
2022-10-03T18:38:31.000Z
2022-10-03T18:38:31
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
reasoning-machines/gsm-hard
reasoning-machines
2023-01-17T03:21:10Z
129
13
null
[ "task_categories:text2text-generation", "language_creators:crowdsourced", "language_creators:expert-generated", "multilinguality:monolingual", "size_categories:unknown", "source_datasets:gsm8k (https://huggingface.co/datasets/gsm8k)", "language:code", "license:mit", "math_reasoning", "symbolic_rea...
2023-01-17T03:21:10Z
2023-01-17T03:05:50.000Z
2023-01-17T03:05:50
--- annotations_creators: [] language_creators: - crowdsourced - expert-generated language: - code license: - mit multilinguality: - monolingual size_categories: - unknown source_datasets: - gsm8k (https://huggingface.co/datasets/gsm8k) task_categories: - text2text-generation task_ids: [] pretty_name: gsm-hard tags: - math_reasoning - symbolic_reasoning --- ## Dataset Description - **Repository:** https://reasonwithpal.com/ - **Paper:** [PaL: Program-Aided Language Model](https://arxiv.org/abs/2211.10435) ### Dataset Summary This is the harder version of gsm8k math reasoning dataset (https://huggingface.co/datasets/gsm8k). We construct this dataset by replacing the numbers in the questions of GSM8K with larger numbers that are less common.  ### Supported Tasks and Leaderboards This dataset is used to evaluate math reasoning ### Languages English - Numbers ## Dataset Structure ```python dataset = load_dataset("reasoning-machines/gsm-hard") DatasetDict({ train: Dataset({ features: ['input', 'code', 'target'], num_rows: 1319 }) }) ``` ### Data Fields train/dev/test: - input: The question - code: The corresponding code solution to the question - target: The answer ### Citation Information ``` @article{gao2022pal, title={PAL: Program-aided Language Models}, author={Gao, Luyu and Madaan, Aman and Zhou, Shuyan and Alon, Uri and Liu, Pengfei and Yang, Yiming and Callan, Jamie and Neubig, Graham}, journal={arXiv preprint arXiv:2211.10435}, year={2022} } ```
[ -0.43402454257011414, -0.5419299602508545, 0.40609806776046753, 0.3314664661884308, -0.18342670798301697, -0.3897857964038849, -0.39361950755119324, 0.1266249120235443, -0.09706126153469086, 0.5833651423454285, -0.5983653664588928, -0.45017877221107483, -0.49818697571754456, 0.252539038658...
null
null
null
null
null
null
null
null
null
null
null
null
null
MultiCoNER/multiconer_v2
MultiCoNER
2023-07-06T18:37:15Z
129
7
null
[ "task_categories:token-classification", "size_categories:100K<n<1M", "language:bn", "language:zh", "language:de", "language:en", "language:es", "language:fa", "language:fr", "language:hi", "language:it", "language:pt", "language:sv", "language:uk", "license:cc-by-4.0", "multiconer", ...
2023-07-06T18:37:15Z
2023-03-01T00:57:16.000Z
2023-03-01T00:57:16
--- license: cc-by-4.0 task_categories: - token-classification language: - bn - zh - de - en - es - fa - fr - hi - it - pt - sv - uk tags: - multiconer - ner - multilingual - named entity recognition - fine-grained ner size_categories: - 100K<n<1M --- # Dataset Card for Multilingual Complex Named Entity Recognition (MultiCoNER) ## Dataset Description - **Homepage:** https://multiconer.github.io - **Repository:** - **Paper:** - **Leaderboard:** https://multiconer.github.io/results, https://codalab.lisn.upsaclay.fr/competitions/10025 - **Point of Contact:** https://multiconer.github.io/organizers ### Dataset Summary The tagset of MultiCoNER is a fine-grained tagset. The fine to coarse level mapping of the tags are as follows: * Location (LOC) : Facility, OtherLOC, HumanSettlement, Station * Creative Work (CW) : VisualWork, MusicalWork, WrittenWork, ArtWork, Software * Group (GRP) : MusicalGRP, PublicCORP, PrivateCORP, AerospaceManufacturer, SportsGRP, CarManufacturer, ORG * Person (PER) : Scientist, Artist, Athlete, Politician, Cleric, SportsManager, OtherPER * Product (PROD) : Clothing, Vehicle, Food, Drink, OtherPROD * Medical (MED) : Medication/Vaccine, MedicalProcedure, AnatomicalStructure, Symptom, Disease ### Supported Tasks and Leaderboards The final leaderboard of the shared task is available <a href="https://multiconer.github.io/results" target="_blank">here</a>. ### Languages Supported languages are Bangla, Chinese, English, Spanish, Farsi, French, German, Hindi, Italian, Portuguese, Swedish, Ukrainian. ## Dataset Structure The dataset follows CoNLL format. ### Data Instances Here are some examples in different languages: * Bangla: [লিটল মিক্স | MusicalGrp] এ যোগদানের আগে তিনি [পিৎজা হাট | ORG] এ ওয়েট্রেস হিসাবে কাজ করেছিলেন। * Chinese: 它的纤维穿过 [锁骨 | AnatomicalStructure] 并沿颈部侧面倾斜向上和内侧. * English: [wes anderson | Artist]'s film [the grand budapest hotel | VisualWork] opened the festival . 
* Farsi: است] ناگویا |HumanSettlement] مرکزاین استان شهر * French: l [amiral de coligny | Politician] réussit à s y glisser . * German: in [frühgeborenes | Disease] führt dies zu [irds | Symptom] . * Hindi: १७९६ में उन्हें [शाही स्वीडिश विज्ञान अकादमी | Facility] का सदस्य चुना गया। * Italian: è conservato nel [rijksmuseum | Facility] di [amsterdam | HumanSettlement] . * Portuguese: também é utilizado para se fazer [licor | Drink] e [vinhos | Drink]. * Spanish: fue superado por el [aon center | Facility] de [los ángeles | HumanSettlement] . * Swedish: [tom hamilton | Artist] amerikansk musiker basist i [aerosmith | MusicalGRP] . * Ukrainian: назва альбому походить з роману « [кінець дитинства | WrittenWork] » англійського письменника [артура кларка | Artist] . ### Data Fields The data has two fields. One is the token and another is the label. Here is an example from the English data. ``` # id f5458a3a-cd23-4df4-8384-4e23fe33a66b domain=en doris _ _ B-Artist day _ _ I-Artist included _ _ O in _ _ O the _ _ O album _ _ O billy _ _ B-MusicalWork rose _ _ I-MusicalWork 's _ _ I-MusicalWork jumbo _ _ I-MusicalWork ``` ### Data Splits Train, Dev, and Test splits are provided ## Dataset Creation TBD ## Loading the Dataset ```python from datasets import load_dataset english_data = load_dataset('MultiCoNER/multiconer_v2', 'English (EN)') ``` ### Licensing Information CC BY 4.0 ### Citation Information ``` @inproceedings{multiconer2-report, title={{SemEval-2023 Task 2: Fine-grained Multilingual Named Entity Recognition (MultiCoNER 2)}}, author={Fetahu, Besnik and Kar, Sudipta and Chen, Zhiyu and Rokhlenko, Oleg and Malmasi, Shervin}, booktitle={Proceedings of the 17th International Workshop on Semantic Evaluation (SemEval-2023)}, year={2023}, publisher={Association for Computational Linguistics}, } @article{multiconer2-data, title={{MultiCoNER v2: a Large Multilingual dataset for Fine-grained and Noisy Named Entity Recognition}}, author={Fetahu, Besnik and Chen, Zhiyu and 
Kar, Sudipta and Rokhlenko, Oleg and Malmasi, Shervin}, year={2023}, } ```
[ -0.5681772232055664, -0.5472185015678406, 0.13240674138069153, 0.3441118001937866, -0.33636489510536194, 0.10138946026563644, -0.6027072072029114, -0.8035322427749634, 0.46672192215919495, 0.20153604447841644, -0.5029615759849548, -0.8848946690559387, -0.6008736491203308, 0.306878477334976...
null
null
null
null
null
null
null
null
null
null
null
null
null
danielpark/MQuAD-v1
danielpark
2023-04-07T12:21:48Z
129
2
null
[ "task_categories:question-answering", "task_categories:text-generation", "language:en", "language:ko", "license:apache-2.0", "biology", "region:us" ]
2023-04-07T12:21:48Z
2023-04-06T06:31:15.000Z
2023-04-06T06:31:15
--- license: apache-2.0 task_categories: - question-answering - text-generation language: - en - ko tags: - biology pretty_name: Medical domain QA dataset for training a medical chatbot. --- # MQuAD The Medical Question and Answering dataset(MQuAD) has been refined, including the following datasets. You can download it through the Hugging Face dataset. Use the DATASETS method as follows. ## Quick Guide ```python from datasets import load_dataset dataset = load_dataset("danielpark/MQuAD-v1") ``` Medical Q/A datasets gathered from the following websites. - eHealth Forum - iCliniq - Question Doctors - WebMD Data was gathered at the 5th of May 2017. The MQuAD provides embedded question and answer arrays in string format, so it is recommended to convert the string-formatted arrays into float format as follows. This measure has been applied to save resources and time used for embedding. ```python from datasets import load_dataset from utilfunction import col_convert import pandas as pd qa = load_dataset("danielpark/MQuAD-v1", "csv") df_qa = pd.DataFrame(qa['train']) df_qa = col_convert(df_qa, ['Q_FFNN_embeds', 'A_FFNN_embeds']) ```
[ -0.47276434302330017, -0.841637909412384, 0.2535485625267029, 0.34965917468070984, -0.26535725593566895, -0.040871985256671906, 0.0758180022239685, 0.09703262150287628, 0.755894660949707, 0.38553979992866516, -0.6626506447792053, -0.32984116673469543, -0.43517735600471497, 0.18800212442874...
null
null
null
null
null
null
null
null
null
null
null
null
null
teknium/GPTeacher-General-Instruct
teknium
2023-04-29T23:27:46Z
129
29
null
[ "license:mit", "region:us" ]
2023-04-29T23:27:46Z
2023-04-29T23:22:58.000Z
2023-04-29T23:22:58
--- license: mit --- GPTeacher General-Instruct dataset is GPT-4 Generated self-instruct dataset. There are multiple versions, with more or less similarity reductions. The dedupe only dataset contains 18194 entries, with less the more similarity is reduced. Format is identical to alpaca's, with a varyiable mix of Instruction/Input/Response, and Instruction/NullInput/Response fields. Learn more on github here: https://github.com/teknium1/GPTeacher
[ -0.5138243436813354, -0.8214493989944458, 0.5366231799125671, -0.06347023695707321, 0.012619838118553162, -0.36176833510398865, 0.15980850160121918, -0.2506568133831024, 0.10128790140151978, 0.6704524159431458, -0.8561649322509766, -0.6494719386100769, -0.679768979549408, 0.000314806733513...
null
null
null
null
null
null
null
null
null
null
null
null
null
DongfuTingle/FeTaQA
DongfuTingle
2023-05-08T15:52:42Z
129
4
null
[ "task_categories:table-question-answering", "task_categories:table-to-text", "task_categories:question-answering", "size_categories:1K<n<10K", "language:en", "license:mit", "region:us" ]
2023-05-08T15:52:42Z
2023-05-08T15:33:08.000Z
2023-05-08T15:33:08
--- license: mit task_categories: - table-question-answering - table-to-text - question-answering language: - en pretty_name: fetaqa size_categories: - 1K<n<10K --- This repo is the unofficial FeTA-QA dataset from paper [FeTaQA: Free-form Table Question Answering](https://direct.mit.edu/tacl/article/doi/10.1162/tacl_a_00446/109273/FeTaQA-Free-form-Table-Question-Answering). The original purpose to make it easier for users to download and use dataset. All the data is publicly avaliable on [their offical Github site](https://github.com/Yale-LILY/FeTaQA) If there is anything wrong, please raise an issue in the community and I will fix it if I am available.
[ -0.26119381189346313, -0.5687746405601501, 0.6721888780593872, 0.07673666626214981, 0.01713760383427143, 0.09976741671562195, 0.3292165994644165, -0.21098338067531586, 0.4669467508792877, 0.675868034362793, -0.6880009174346924, -0.4444502890110016, 0.0857744887471199, 0.20796220004558563, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
SotirisLegkas/off_hate_toxic
SotirisLegkas
2023-05-11T12:12:18Z
129
0
null
[ "region:us" ]
2023-05-11T12:12:18Z
2023-05-08T23:46:04.000Z
2023-05-08T23:46:04
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
Abzu/dolly_hhrlhf
Abzu
2023-06-04T19:33:11Z
129
3
null
[ "task_categories:question-answering", "task_categories:text2text-generation", "language:en", "license:cc-by-sa-3.0", "region:us" ]
2023-06-04T19:33:11Z
2023-05-25T08:34:30.000Z
2023-05-25T08:34:30
--- dataset_info: features: - name: prompt dtype: string - name: response dtype: string splits: - name: train num_bytes: 22346337.075312525 num_examples: 35205 - name: test num_bytes: 2483137.924687476 num_examples: 3912 download_size: 16025539 dataset_size: 24829475 license: cc-by-sa-3.0 task_categories: - question-answering - text2text-generation language: - en --- # Dataset Card for "dolly_hhrlhf" This is the dataset from mosaic mosaicml/dolly_hhrlhf removing some duplicates found. [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.4153252840042114, -0.45235663652420044, -0.11464602500200272, 0.352505624294281, -0.32832393050193787, 0.09877919405698776, 0.5401520133018494, -0.0511205680668354, 0.8923934698104858, 1.0581170320510864, -0.9399067759513855, -0.8424868583679199, -0.8747255206108093, 0.03168965503573418...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_oh-yeontaek__llama-2-70B-LoRA-assemble-v2
open-llm-leaderboard
2023-09-15T16:07:38Z
129
0
null
[ "region:us" ]
2023-09-15T16:07:38Z
2023-09-15T16:06:34.000Z
2023-09-15T16:06:34
--- pretty_name: Evaluation run of oh-yeontaek/llama-2-70B-LoRA-assemble-v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [oh-yeontaek/llama-2-70B-LoRA-assemble-v2](https://huggingface.co/oh-yeontaek/llama-2-70B-LoRA-assemble-v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_oh-yeontaek__llama-2-70B-LoRA-assemble-v2\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-15T16:06:18.387785](https://huggingface.co/datasets/open-llm-leaderboard/details_oh-yeontaek__llama-2-70B-LoRA-assemble-v2/blob/main/results_2023-09-15T16-06-18.387785.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6931761249212157,\n\ \ \"acc_stderr\": 0.031300161246260914,\n \"acc_norm\": 0.6971025131327819,\n\ \ \"acc_norm_stderr\": 0.03127024725201448,\n \"mc1\": 0.4663402692778458,\n\ \ \"mc1_stderr\": 0.01746379386716811,\n \"mc2\": 0.6478807414957388,\n\ \ \"mc2_stderr\": 0.014914964973799093\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6834470989761092,\n \"acc_stderr\": 0.013592431519068079,\n\ \ \"acc_norm\": 0.7184300341296929,\n \"acc_norm_stderr\": 0.013143376735009022\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6721768571997611,\n\ \ \"acc_stderr\": 0.0046846063106423304,\n \"acc_norm\": 0.8688508265285799,\n\ \ \"acc_norm_stderr\": 0.00336873543416138\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n\ \ \"acc_stderr\": 0.042039210401562783,\n \"acc_norm\": 0.6148148148148148,\n\ \ \"acc_norm_stderr\": 0.042039210401562783\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.03355045304882924,\n\ \ \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.03355045304882924\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7433962264150943,\n \"acc_stderr\": 0.02688064788905199,\n\ \ \"acc_norm\": 0.7433962264150943,\n \"acc_norm_stderr\": 0.02688064788905199\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8194444444444444,\n\ \ \"acc_stderr\": 0.03216600808802267,\n \"acc_norm\": 0.8194444444444444,\n\ \ \"acc_norm_stderr\": 0.03216600808802267\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\"\ : 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n\ \ \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n\ \ \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n\ \ \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6553191489361702,\n \"acc_stderr\": 0.031068985963122145,\n\ \ \"acc_norm\": 0.6553191489361702,\n \"acc_norm_stderr\": 0.031068985963122145\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n\ \ \"acc_stderr\": 0.04630653203366595,\n \"acc_norm\": 0.41228070175438597,\n\ \ \"acc_norm_stderr\": 0.04630653203366595\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.04013124195424386,\n\ \ \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.04013124195424386\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.4656084656084656,\n \"acc_stderr\": 0.02569032176249384,\n \"\ acc_norm\": 0.4656084656084656,\n 
\"acc_norm_stderr\": 0.02569032176249384\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n\ \ \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n\ \ \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \ \ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.832258064516129,\n\ \ \"acc_stderr\": 0.021255464065371314,\n \"acc_norm\": 0.832258064516129,\n\ \ \"acc_norm_stderr\": 0.021255464065371314\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5467980295566502,\n \"acc_stderr\": 0.03502544650845872,\n\ \ \"acc_norm\": 0.5467980295566502,\n \"acc_norm_stderr\": 0.03502544650845872\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.02931118867498312,\n\ \ \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.02931118867498312\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.9040404040404041,\n \"acc_stderr\": 0.020984808610047933,\n \"\ acc_norm\": 0.9040404040404041,\n \"acc_norm_stderr\": 0.020984808610047933\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.927461139896373,\n \"acc_stderr\": 0.018718998520678178,\n\ \ \"acc_norm\": 0.927461139896373,\n \"acc_norm_stderr\": 0.018718998520678178\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6974358974358974,\n \"acc_stderr\": 0.02329088805377272,\n \ \ \"acc_norm\": 0.6974358974358974,\n \"acc_norm_stderr\": 0.02329088805377272\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473072,\n \ \ \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473072\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7815126050420168,\n \"acc_stderr\": 0.026841514322958934,\n\ \ \"acc_norm\": 0.7815126050420168,\n \"acc_norm_stderr\": 0.026841514322958934\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.47019867549668876,\n \"acc_stderr\": 0.040752249922169775,\n \"\ acc_norm\": 0.47019867549668876,\n \"acc_norm_stderr\": 0.040752249922169775\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8844036697247707,\n \"acc_stderr\": 0.013708749534172636,\n \"\ acc_norm\": 0.8844036697247707,\n \"acc_norm_stderr\": 0.013708749534172636\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5648148148148148,\n \"acc_stderr\": 0.033812000056435254,\n \"\ acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.033812000056435254\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8970588235294118,\n \"acc_stderr\": 0.021328337570804365,\n \"\ acc_norm\": 0.8970588235294118,\n \"acc_norm_stderr\": 0.021328337570804365\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8776371308016878,\n \"acc_stderr\": 0.02133174182974679,\n \ \ \"acc_norm\": 0.8776371308016878,\n \"acc_norm_stderr\": 0.02133174182974679\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7668161434977578,\n\ \ \"acc_stderr\": 0.028380391147094706,\n \"acc_norm\": 0.7668161434977578,\n\ \ \"acc_norm_stderr\": 0.028380391147094706\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476073,\n\ \ \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476073\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8512396694214877,\n \"acc_stderr\": 0.03248470083807194,\n \"\ acc_norm\": 0.8512396694214877,\n \"acc_norm_stderr\": 0.03248470083807194\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8220858895705522,\n \"acc_stderr\": 0.03004735765580663,\n\ \ \"acc_norm\": 0.8220858895705522,\n \"acc_norm_stderr\": 0.03004735765580663\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n\ \ \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n\ \ \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.0376017800602662,\n\ \ \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.0376017800602662\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n\ \ \"acc_stderr\": 0.02058849131609238,\n \"acc_norm\": 0.8888888888888888,\n\ \ \"acc_norm_stderr\": 0.02058849131609238\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n\ \ \"acc_stderr\": 0.012185528166499983,\n \"acc_norm\": 0.8659003831417624,\n\ \ \"acc_norm_stderr\": 0.012185528166499983\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7774566473988439,\n \"acc_stderr\": 0.02239421566194282,\n\ \ \"acc_norm\": 0.7774566473988439,\n \"acc_norm_stderr\": 0.02239421566194282\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5597765363128492,\n\ \ \"acc_stderr\": 0.01660256461504993,\n \"acc_norm\": 
0.5597765363128492,\n\ \ \"acc_norm_stderr\": 0.01660256461504993\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02526169121972949,\n\ \ \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02526169121972949\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7588424437299035,\n\ \ \"acc_stderr\": 0.024296594034763426,\n \"acc_norm\": 0.7588424437299035,\n\ \ \"acc_norm_stderr\": 0.024296594034763426\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.022409674547304168,\n\ \ \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.022409674547304168\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5531914893617021,\n \"acc_stderr\": 0.02965823509766691,\n \ \ \"acc_norm\": 0.5531914893617021,\n \"acc_norm_stderr\": 0.02965823509766691\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5586701434159062,\n\ \ \"acc_stderr\": 0.012682016335646678,\n \"acc_norm\": 0.5586701434159062,\n\ \ \"acc_norm_stderr\": 0.012682016335646678\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.026799562024887653,\n\ \ \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.026799562024887653\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7434640522875817,\n \"acc_stderr\": 0.017667841612379005,\n \ \ \"acc_norm\": 0.7434640522875817,\n \"acc_norm_stderr\": 0.017667841612379005\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7454545454545455,\n\ \ \"acc_stderr\": 0.041723430387053825,\n \"acc_norm\": 0.7454545454545455,\n\ \ \"acc_norm_stderr\": 0.041723430387053825\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7795918367346939,\n \"acc_stderr\": 0.026537045312145298,\n\ \ \"acc_norm\": 0.7795918367346939,\n \"acc_norm_stderr\": 0.026537045312145298\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n\ \ \"acc_stderr\": 0.022509345325101706,\n \"acc_norm\": 0.8855721393034826,\n\ \ \"acc_norm_stderr\": 0.022509345325101706\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \ \ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n\ \ \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n\ \ \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8596491228070176,\n \"acc_stderr\": 0.0266405825391332,\n\ \ \"acc_norm\": 0.8596491228070176,\n \"acc_norm_stderr\": 0.0266405825391332\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4663402692778458,\n\ \ \"mc1_stderr\": 0.01746379386716811,\n \"mc2\": 0.6478807414957388,\n\ \ \"mc2_stderr\": 0.014914964973799093\n }\n}\n```" repo_url: https://huggingface.co/oh-yeontaek/llama-2-70B-LoRA-assemble-v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|arc:challenge|25_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hellaswag|10_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T16-06-18.387785.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-15T16-06-18.387785.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T16-06-18.387785.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-management|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-15T16-06-18.387785.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_15T16_06_18.387785 path: - '**/details_harness|truthfulqa:mc|0_2023-09-15T16-06-18.387785.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-15T16-06-18.387785.parquet' - config_name: results data_files: - split: 2023_09_15T16_06_18.387785 path: - results_2023-09-15T16-06-18.387785.parquet - split: latest path: - results_2023-09-15T16-06-18.387785.parquet --- # Dataset Card for Evaluation run of oh-yeontaek/llama-2-70B-LoRA-assemble-v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/oh-yeontaek/llama-2-70B-LoRA-assemble-v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model 
[oh-yeontaek/llama-2-70B-LoRA-assemble-v2](https://huggingface.co/oh-yeontaek/llama-2-70B-LoRA-assemble-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_oh-yeontaek__llama-2-70B-LoRA-assemble-v2", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-15T16:06:18.387785](https://huggingface.co/datasets/open-llm-leaderboard/details_oh-yeontaek__llama-2-70B-LoRA-assemble-v2/blob/main/results_2023-09-15T16-06-18.387785.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6931761249212157, "acc_stderr": 0.031300161246260914, "acc_norm": 0.6971025131327819, "acc_norm_stderr": 0.03127024725201448, "mc1": 0.4663402692778458, "mc1_stderr": 0.01746379386716811, "mc2": 0.6478807414957388, "mc2_stderr": 0.014914964973799093 }, "harness|arc:challenge|25": { "acc": 0.6834470989761092, "acc_stderr": 0.013592431519068079, "acc_norm": 0.7184300341296929, "acc_norm_stderr": 0.013143376735009022 }, "harness|hellaswag|10": { "acc": 0.6721768571997611, "acc_stderr": 0.0046846063106423304, "acc_norm": 0.8688508265285799, "acc_norm_stderr": 0.00336873543416138 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.042039210401562783, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.042039210401562783 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.03355045304882924, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882924 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7433962264150943, "acc_stderr": 0.02688064788905199, "acc_norm": 0.7433962264150943, "acc_norm_stderr": 0.02688064788905199 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8194444444444444, "acc_stderr": 0.03216600808802267, "acc_norm": 0.8194444444444444, "acc_norm_stderr": 0.03216600808802267 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, 
"acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6553191489361702, "acc_stderr": 0.031068985963122145, "acc_norm": 0.6553191489361702, "acc_norm_stderr": 0.031068985963122145 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.04630653203366595, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.04630653203366595 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.04013124195424386, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.04013124195424386 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4656084656084656, "acc_stderr": 0.02569032176249384, "acc_norm": 0.4656084656084656, "acc_norm_stderr": 0.02569032176249384 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.832258064516129, "acc_stderr": 0.021255464065371314, "acc_norm": 0.832258064516129, "acc_norm_stderr": 0.021255464065371314 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5467980295566502, "acc_stderr": 0.03502544650845872, "acc_norm": 0.5467980295566502, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.02931118867498312, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.02931118867498312 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.9040404040404041, "acc_stderr": 0.020984808610047933, "acc_norm": 0.9040404040404041, "acc_norm_stderr": 0.020984808610047933 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.927461139896373, "acc_stderr": 0.018718998520678178, "acc_norm": 0.927461139896373, "acc_norm_stderr": 0.018718998520678178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6974358974358974, "acc_stderr": 0.02329088805377272, "acc_norm": 0.6974358974358974, "acc_norm_stderr": 0.02329088805377272 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473072, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473072 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7815126050420168, "acc_stderr": 0.026841514322958934, "acc_norm": 0.7815126050420168, "acc_norm_stderr": 0.026841514322958934 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.47019867549668876, "acc_stderr": 0.040752249922169775, "acc_norm": 0.47019867549668876, "acc_norm_stderr": 0.040752249922169775 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8844036697247707, "acc_stderr": 0.013708749534172636, "acc_norm": 0.8844036697247707, "acc_norm_stderr": 0.013708749534172636 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 
0.033812000056435254, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.033812000056435254 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8970588235294118, "acc_stderr": 0.021328337570804365, "acc_norm": 0.8970588235294118, "acc_norm_stderr": 0.021328337570804365 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8776371308016878, "acc_stderr": 0.02133174182974679, "acc_norm": 0.8776371308016878, "acc_norm_stderr": 0.02133174182974679 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7668161434977578, "acc_stderr": 0.028380391147094706, "acc_norm": 0.7668161434977578, "acc_norm_stderr": 0.028380391147094706 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476073, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476073 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8512396694214877, "acc_stderr": 0.03248470083807194, "acc_norm": 0.8512396694214877, "acc_norm_stderr": 0.03248470083807194 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8220858895705522, "acc_stderr": 0.03004735765580663, "acc_norm": 0.8220858895705522, "acc_norm_stderr": 0.03004735765580663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.0376017800602662, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.0376017800602662 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.02058849131609238, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.02058849131609238 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 
0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499983, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499983 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.5597765363128492, "acc_stderr": 0.01660256461504993, "acc_norm": 0.5597765363128492, "acc_norm_stderr": 0.01660256461504993 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02526169121972949, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02526169121972949 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7588424437299035, "acc_stderr": 0.024296594034763426, "acc_norm": 0.7588424437299035, "acc_norm_stderr": 0.024296594034763426 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7962962962962963, "acc_stderr": 0.022409674547304168, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.022409674547304168 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5531914893617021, "acc_stderr": 0.02965823509766691, "acc_norm": 0.5531914893617021, "acc_norm_stderr": 0.02965823509766691 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5586701434159062, "acc_stderr": 0.012682016335646678, "acc_norm": 0.5586701434159062, "acc_norm_stderr": 0.012682016335646678 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7352941176470589, "acc_stderr": 0.026799562024887653, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.026799562024887653 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7434640522875817, "acc_stderr": 0.017667841612379005, "acc_norm": 0.7434640522875817, "acc_norm_stderr": 0.017667841612379005 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 
0.041723430387053825, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.041723430387053825 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7795918367346939, "acc_stderr": 0.026537045312145298, "acc_norm": 0.7795918367346939, "acc_norm_stderr": 0.026537045312145298 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8855721393034826, "acc_stderr": 0.022509345325101706, "acc_norm": 0.8855721393034826, "acc_norm_stderr": 0.022509345325101706 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8596491228070176, "acc_stderr": 0.0266405825391332, "acc_norm": 0.8596491228070176, "acc_norm_stderr": 0.0266405825391332 }, "harness|truthfulqa:mc|0": { "mc1": 0.4663402692778458, "mc1_stderr": 0.01746379386716811, "mc2": 0.6478807414957388, "mc2_stderr": 0.014914964973799093 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7240831255912781, -0.8987469673156738, 0.2881299555301666, 0.18435774743556976, -0.1984223872423172, -0.08151286840438843, 0.05484169349074364, -0.2935621738433838, 0.6106579899787903, -0.017784764990210533, -0.5133399367332458, -0.6853688359260559, -0.4409627616405487, 0.22198398411273...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.1
open-llm-leaderboard
2023-09-19T01:01:39Z
129
0
null
[ "region:us" ]
2023-09-19T01:01:39Z
2023-09-19T01:00:38.000Z
2023-09-19T01:00:38
--- pretty_name: Evaluation run of ICBU-NPU/FashionGPT-70B-V1.1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ICBU-NPU/FashionGPT-70B-V1.1](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.1\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-19T01:00:15.258935](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.1/blob/main/results_2023-09-19T01-00-15.258935.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7092811864393619,\n\ \ \"acc_stderr\": 0.03073791163351864,\n \"acc_norm\": 0.7129018476555437,\n\ \ \"acc_norm_stderr\": 0.030707851474891332,\n \"mc1\": 0.4528763769889841,\n\ \ \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.6526486216773408,\n\ \ \"mc2_stderr\": 0.01469598165350219\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.013582571095815291,\n\ \ \"acc_norm\": 0.7175767918088737,\n \"acc_norm_stderr\": 0.013155456884097222\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7016530571599283,\n\ \ \"acc_stderr\": 0.004565974937793703,\n \"acc_norm\": 0.8819956184027086,\n\ \ \"acc_norm_stderr\": 0.0032195397905004797\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n\ \ \"acc_stderr\": 0.04171654161354543,\n \"acc_norm\": 0.6296296296296297,\n\ \ \"acc_norm_stderr\": 0.04171654161354543\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.8289473684210527,\n \"acc_stderr\": 0.03064360707167709,\n\ \ \"acc_norm\": 0.8289473684210527,\n \"acc_norm_stderr\": 0.03064360707167709\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n\ \ \"acc_stderr\": 0.042295258468165044,\n \"acc_norm\": 0.77,\n \ \ \"acc_norm_stderr\": 0.042295258468165044\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7433962264150943,\n \"acc_stderr\": 0.026880647889051985,\n\ \ \"acc_norm\": 0.7433962264150943,\n \"acc_norm_stderr\": 0.026880647889051985\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8333333333333334,\n\ \ \"acc_stderr\": 0.031164899666948617,\n \"acc_norm\": 0.8333333333333334,\n\ \ \"acc_norm_stderr\": 0.031164899666948617\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n\ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6589595375722543,\n\ \ \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.6589595375722543,\n\ \ \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n\ \ \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.7106382978723405,\n \"acc_stderr\": 0.02964400657700962,\n\ \ \"acc_norm\": 0.7106382978723405,\n \"acc_norm_stderr\": 0.02964400657700962\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4649122807017544,\n\ \ \"acc_stderr\": 0.046920083813689104,\n \"acc_norm\": 0.4649122807017544,\n\ \ \"acc_norm_stderr\": 0.046920083813689104\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.03996629574876719,\n\ \ \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.03996629574876719\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.4708994708994709,\n \"acc_stderr\": 0.025707658614154954,\n \"\ acc_norm\": 0.4708994708994709,\n 
\"acc_norm_stderr\": 0.025707658614154954\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.04472135954999579,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.04472135954999579\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8290322580645161,\n\ \ \"acc_stderr\": 0.02141724293632159,\n \"acc_norm\": 0.8290322580645161,\n\ \ \"acc_norm_stderr\": 0.02141724293632159\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5763546798029556,\n \"acc_stderr\": 0.03476725747649037,\n\ \ \"acc_norm\": 0.5763546798029556,\n \"acc_norm_stderr\": 0.03476725747649037\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8424242424242424,\n \"acc_stderr\": 0.02845038880528436,\n\ \ \"acc_norm\": 0.8424242424242424,\n \"acc_norm_stderr\": 0.02845038880528436\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8939393939393939,\n \"acc_stderr\": 0.02193804773885311,\n \"\ acc_norm\": 0.8939393939393939,\n \"acc_norm_stderr\": 0.02193804773885311\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9326424870466321,\n \"acc_stderr\": 0.018088393839078894,\n\ \ \"acc_norm\": 0.9326424870466321,\n \"acc_norm_stderr\": 0.018088393839078894\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7128205128205128,\n \"acc_stderr\": 0.022939925418530616,\n\ \ \"acc_norm\": 0.7128205128205128,\n \"acc_norm_stderr\": 0.022939925418530616\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ 
acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \ \ \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7773109243697479,\n \"acc_stderr\": 0.02702543349888239,\n \ \ \"acc_norm\": 0.7773109243697479,\n \"acc_norm_stderr\": 0.02702543349888239\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.48344370860927155,\n \"acc_stderr\": 0.040802441856289715,\n \"\ acc_norm\": 0.48344370860927155,\n \"acc_norm_stderr\": 0.040802441856289715\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.9045871559633027,\n \"acc_stderr\": 0.0125958992823358,\n \"acc_norm\"\ : 0.9045871559633027,\n \"acc_norm_stderr\": 0.0125958992823358\n },\n\ \ \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5925925925925926,\n\ \ \"acc_stderr\": 0.03350991604696044,\n \"acc_norm\": 0.5925925925925926,\n\ \ \"acc_norm_stderr\": 0.03350991604696044\n },\n \"harness|hendrycksTest-high_school_us_history|5\"\ : {\n \"acc\": 0.9117647058823529,\n \"acc_stderr\": 0.019907399791316942,\n\ \ \"acc_norm\": 0.9117647058823529,\n \"acc_norm_stderr\": 0.019907399791316942\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8818565400843882,\n \"acc_stderr\": 0.021011052659878467,\n \ \ \"acc_norm\": 0.8818565400843882,\n \"acc_norm_stderr\": 0.021011052659878467\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8026905829596412,\n\ \ \"acc_stderr\": 0.02670985334496796,\n \"acc_norm\": 0.8026905829596412,\n\ \ \"acc_norm_stderr\": 0.02670985334496796\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515368,\n\ \ \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515368\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8760330578512396,\n \"acc_stderr\": 
0.030083098716035202,\n \"\ acc_norm\": 0.8760330578512396,\n \"acc_norm_stderr\": 0.030083098716035202\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8425925925925926,\n\ \ \"acc_stderr\": 0.035207039905179635,\n \"acc_norm\": 0.8425925925925926,\n\ \ \"acc_norm_stderr\": 0.035207039905179635\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8343558282208589,\n \"acc_stderr\": 0.029208296231259104,\n\ \ \"acc_norm\": 0.8343558282208589,\n \"acc_norm_stderr\": 0.029208296231259104\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5446428571428571,\n\ \ \"acc_stderr\": 0.04726835553719098,\n \"acc_norm\": 0.5446428571428571,\n\ \ \"acc_norm_stderr\": 0.04726835553719098\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n\ \ \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9145299145299145,\n\ \ \"acc_stderr\": 0.01831589168562585,\n \"acc_norm\": 0.9145299145299145,\n\ \ \"acc_norm_stderr\": 0.01831589168562585\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8646232439335888,\n\ \ \"acc_stderr\": 0.012234384586856478,\n \"acc_norm\": 0.8646232439335888,\n\ \ \"acc_norm_stderr\": 0.012234384586856478\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7774566473988439,\n \"acc_stderr\": 0.02239421566194282,\n\ \ \"acc_norm\": 0.7774566473988439,\n \"acc_norm_stderr\": 0.02239421566194282\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.646927374301676,\n\ \ \"acc_stderr\": 0.015984204545268575,\n \"acc_norm\": 0.646927374301676,\n\ \ \"acc_norm_stderr\": 0.015984204545268575\n },\n 
\"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n\ \ \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7717041800643086,\n\ \ \"acc_stderr\": 0.0238393033113982,\n \"acc_norm\": 0.7717041800643086,\n\ \ \"acc_norm_stderr\": 0.0238393033113982\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8271604938271605,\n \"acc_stderr\": 0.021038517770157375,\n\ \ \"acc_norm\": 0.8271604938271605,\n \"acc_norm_stderr\": 0.021038517770157375\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.6028368794326241,\n \"acc_stderr\": 0.029189805673587085,\n \ \ \"acc_norm\": 0.6028368794326241,\n \"acc_norm_stderr\": 0.029189805673587085\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5867014341590613,\n\ \ \"acc_stderr\": 0.012576779494860075,\n \"acc_norm\": 0.5867014341590613,\n\ \ \"acc_norm_stderr\": 0.012576779494860075\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7463235294117647,\n \"acc_stderr\": 0.026431329870789524,\n\ \ \"acc_norm\": 0.7463235294117647,\n \"acc_norm_stderr\": 0.026431329870789524\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7647058823529411,\n \"acc_stderr\": 0.017160587235046352,\n \ \ \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.017160587235046352\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7363636363636363,\n\ \ \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.7363636363636363,\n\ \ \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.8122448979591836,\n \"acc_stderr\": 0.02500025603954619,\n\ \ \"acc_norm\": 0.8122448979591836,\n \"acc_norm_stderr\": 0.02500025603954619\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n\ \ 
\"acc_stderr\": 0.022509345325101713,\n \"acc_norm\": 0.8855721393034826,\n\ \ \"acc_norm_stderr\": 0.022509345325101713\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n\ \ \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n\ \ \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.02567934272327692,\n\ \ \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.02567934272327692\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4528763769889841,\n\ \ \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.6526486216773408,\n\ \ \"mc2_stderr\": 0.01469598165350219\n }\n}\n```" repo_url: https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|arc:challenge|25_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hellaswag|10_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-00-15.258935.parquet' - 
'**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-00-15.258935.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-00-15.258935.parquet' - 
config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-00-15.258935.parquet' - config_name: 
harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - 
split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-00-15.258935.parquet' - 
config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-00-15.258935.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_19T01_00_15.258935 path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T01-00-15.258935.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T01-00-15.258935.parquet' - config_name: results data_files: - split: 2023_09_19T01_00_15.258935 path: - results_2023-09-19T01-00-15.258935.parquet - split: latest path: - results_2023-09-19T01-00-15.258935.parquet --- # Dataset Card for Evaluation run of ICBU-NPU/FashionGPT-70B-V1.1 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [ICBU-NPU/FashionGPT-70B-V1.1](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). 
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.1", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-19T01:00:15.258935](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1.1/blob/main/results_2023-09-19T01-00-15.258935.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.7092811864393619, "acc_stderr": 0.03073791163351864, "acc_norm": 0.7129018476555437, "acc_norm_stderr": 0.030707851474891332, "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6526486216773408, "mc2_stderr": 0.01469598165350219 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.013582571095815291, "acc_norm": 0.7175767918088737, "acc_norm_stderr": 0.013155456884097222 }, "harness|hellaswag|10": { "acc": 0.7016530571599283, "acc_stderr": 0.004565974937793703, "acc_norm": 0.8819956184027086, "acc_norm_stderr": 0.0032195397905004797 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.04171654161354543, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.04171654161354543 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.8289473684210527, "acc_stderr": 0.03064360707167709, "acc_norm": 0.8289473684210527, "acc_norm_stderr": 0.03064360707167709 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.042295258468165044, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165044 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7433962264150943, "acc_stderr": 0.026880647889051985, "acc_norm": 0.7433962264150943, "acc_norm_stderr": 0.026880647889051985 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8333333333333334, "acc_stderr": 0.031164899666948617, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.031164899666948617 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, 
"acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6589595375722543, "acc_stderr": 0.036146654241808254, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.7106382978723405, "acc_stderr": 0.02964400657700962, "acc_norm": 0.7106382978723405, "acc_norm_stderr": 0.02964400657700962 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4649122807017544, "acc_stderr": 0.046920083813689104, "acc_norm": 0.4649122807017544, "acc_norm_stderr": 0.046920083813689104 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.03996629574876719, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.03996629574876719 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4708994708994709, "acc_stderr": 0.025707658614154954, "acc_norm": 0.4708994708994709, "acc_norm_stderr": 0.025707658614154954 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5, "acc_stderr": 0.04472135954999579, "acc_norm": 0.5, "acc_norm_stderr": 0.04472135954999579 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8290322580645161, "acc_stderr": 0.02141724293632159, "acc_norm": 0.8290322580645161, "acc_norm_stderr": 0.02141724293632159 }, "harness|hendrycksTest-high_school_chemistry|5": { 
"acc": 0.5763546798029556, "acc_stderr": 0.03476725747649037, "acc_norm": 0.5763546798029556, "acc_norm_stderr": 0.03476725747649037 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8424242424242424, "acc_stderr": 0.02845038880528436, "acc_norm": 0.8424242424242424, "acc_norm_stderr": 0.02845038880528436 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8939393939393939, "acc_stderr": 0.02193804773885311, "acc_norm": 0.8939393939393939, "acc_norm_stderr": 0.02193804773885311 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9326424870466321, "acc_stderr": 0.018088393839078894, "acc_norm": 0.9326424870466321, "acc_norm_stderr": 0.018088393839078894 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7128205128205128, "acc_stderr": 0.022939925418530616, "acc_norm": 0.7128205128205128, "acc_norm_stderr": 0.022939925418530616 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7773109243697479, "acc_stderr": 0.02702543349888239, "acc_norm": 0.7773109243697479, "acc_norm_stderr": 0.02702543349888239 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.48344370860927155, "acc_stderr": 0.040802441856289715, "acc_norm": 0.48344370860927155, "acc_norm_stderr": 0.040802441856289715 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.9045871559633027, "acc_stderr": 0.0125958992823358, "acc_norm": 0.9045871559633027, "acc_norm_stderr": 0.0125958992823358 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5925925925925926, "acc_stderr": 0.03350991604696044, "acc_norm": 0.5925925925925926, 
"acc_norm_stderr": 0.03350991604696044 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.9117647058823529, "acc_stderr": 0.019907399791316942, "acc_norm": 0.9117647058823529, "acc_norm_stderr": 0.019907399791316942 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8818565400843882, "acc_stderr": 0.021011052659878467, "acc_norm": 0.8818565400843882, "acc_norm_stderr": 0.021011052659878467 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8026905829596412, "acc_stderr": 0.02670985334496796, "acc_norm": 0.8026905829596412, "acc_norm_stderr": 0.02670985334496796 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8549618320610687, "acc_stderr": 0.030884661089515368, "acc_norm": 0.8549618320610687, "acc_norm_stderr": 0.030884661089515368 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8760330578512396, "acc_stderr": 0.030083098716035202, "acc_norm": 0.8760330578512396, "acc_norm_stderr": 0.030083098716035202 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8425925925925926, "acc_stderr": 0.035207039905179635, "acc_norm": 0.8425925925925926, "acc_norm_stderr": 0.035207039905179635 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8343558282208589, "acc_stderr": 0.029208296231259104, "acc_norm": 0.8343558282208589, "acc_norm_stderr": 0.029208296231259104 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5446428571428571, "acc_stderr": 0.04726835553719098, "acc_norm": 0.5446428571428571, "acc_norm_stderr": 0.04726835553719098 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9145299145299145, "acc_stderr": 0.01831589168562585, "acc_norm": 0.9145299145299145, "acc_norm_stderr": 0.01831589168562585 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 
0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8646232439335888, "acc_stderr": 0.012234384586856478, "acc_norm": 0.8646232439335888, "acc_norm_stderr": 0.012234384586856478 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7774566473988439, "acc_stderr": 0.02239421566194282, "acc_norm": 0.7774566473988439, "acc_norm_stderr": 0.02239421566194282 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.646927374301676, "acc_stderr": 0.015984204545268575, "acc_norm": 0.646927374301676, "acc_norm_stderr": 0.015984204545268575 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7717041800643086, "acc_stderr": 0.0238393033113982, "acc_norm": 0.7717041800643086, "acc_norm_stderr": 0.0238393033113982 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8271604938271605, "acc_stderr": 0.021038517770157375, "acc_norm": 0.8271604938271605, "acc_norm_stderr": 0.021038517770157375 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.6028368794326241, "acc_stderr": 0.029189805673587085, "acc_norm": 0.6028368794326241, "acc_norm_stderr": 0.029189805673587085 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5867014341590613, "acc_stderr": 0.012576779494860075, "acc_norm": 0.5867014341590613, "acc_norm_stderr": 0.012576779494860075 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7463235294117647, "acc_stderr": 0.026431329870789524, "acc_norm": 0.7463235294117647, "acc_norm_stderr": 0.026431329870789524 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7647058823529411, "acc_stderr": 0.017160587235046352, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.017160587235046352 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7363636363636363, "acc_stderr": 0.04220224692971987, "acc_norm": 0.7363636363636363, 
"acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8122448979591836, "acc_stderr": 0.02500025603954619, "acc_norm": 0.8122448979591836, "acc_norm_stderr": 0.02500025603954619 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8855721393034826, "acc_stderr": 0.022509345325101713, "acc_norm": 0.8855721393034826, "acc_norm_stderr": 0.022509345325101713 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8713450292397661, "acc_stderr": 0.02567934272327692, "acc_norm": 0.8713450292397661, "acc_norm_stderr": 0.02567934272327692 }, "harness|truthfulqa:mc|0": { "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.6526486216773408, "mc2_stderr": 0.01469598165350219 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7489234805107117, -0.8492383360862732, 0.2456246167421341, 0.2504158914089203, -0.2111755907535553, -0.03305363655090332, 0.03670487925410271, -0.22612455487251282, 0.5746283531188965, -0.07885277271270752, -0.5164567232131958, -0.7010749578475952, -0.44486141204833984, 0.23408314585685...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1
open-llm-leaderboard
2023-10-29T15:25:02Z
129
0
null
[ "region:us" ]
2023-10-29T15:25:02Z
2023-09-19T01:12:41.000Z
2023-09-19T01:12:41
--- pretty_name: Evaluation run of ICBU-NPU/FashionGPT-70B-V1 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ICBU-NPU/FashionGPT-70B-V1](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-10-29T15:24:49.736716](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1/blob/main/results_2023-10-29T15-24-49.736716.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0019924496644295304,\n\ \ \"em_stderr\": 0.0004566676462666993,\n \"f1\": 0.07125104865771813,\n\ \ \"f1_stderr\": 0.0014102826102321945,\n \"acc\": 0.5589478168926856,\n\ \ \"acc_stderr\": 0.011387742640607\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.0004566676462666993,\n\ \ \"f1\": 0.07125104865771813,\n \"f1_stderr\": 0.0014102826102321945\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2812736921910538,\n \ \ \"acc_stderr\": 0.012384789310940239\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8366219415943172,\n \"acc_stderr\": 0.010390695970273764\n\ \ }\n}\n```" repo_url: https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|arc:challenge|25_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-19T01-12-17.792946.parquet' - config_name: harness_drop_3 data_files: - split: 2023_10_29T15_24_49.736716 path: - '**/details_harness|drop|3_2023-10-29T15-24-49.736716.parquet' - split: latest path: - '**/details_harness|drop|3_2023-10-29T15-24-49.736716.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_10_29T15_24_49.736716 path: - '**/details_harness|gsm8k|5_2023-10-29T15-24-49.736716.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-10-29T15-24-49.736716.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hellaswag|10_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_19T01_12_17.792946 
path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-12-17.792946.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-12-17.792946.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-12-17.792946.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-12-17.792946.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-12-17.792946.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-12-17.792946.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-12-17.792946.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-19T01-12-17.792946.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_19T01_12_17.792946 path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T01-12-17.792946.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-19T01-12-17.792946.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_10_29T15_24_49.736716 path: - '**/details_harness|winogrande|5_2023-10-29T15-24-49.736716.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-10-29T15-24-49.736716.parquet' - config_name: results data_files: - split: 2023_09_19T01_12_17.792946 path: - results_2023-09-19T01-12-17.792946.parquet - split: 2023_10_29T15_24_49.736716 path: - results_2023-10-29T15-24-49.736716.parquet - split: latest path: - results_2023-10-29T15-24-49.736716.parquet --- # Dataset Card for Evaluation run of ICBU-NPU/FashionGPT-70B-V1 ## Dataset Description - 
**Homepage:** - **Repository:** https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [ICBU-NPU/FashionGPT-70B-V1](https://huggingface.co/ICBU-NPU/FashionGPT-70B-V1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-10-29T15:24:49.736716](https://huggingface.co/datasets/open-llm-leaderboard/details_ICBU-NPU__FashionGPT-70B-V1/blob/main/results_2023-10-29T15-24-49.736716.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0019924496644295304, "em_stderr": 0.0004566676462666993, "f1": 0.07125104865771813, "f1_stderr": 0.0014102826102321945, "acc": 0.5589478168926856, "acc_stderr": 0.011387742640607 }, "harness|drop|3": { "em": 0.0019924496644295304, "em_stderr": 0.0004566676462666993, "f1": 0.07125104865771813, "f1_stderr": 0.0014102826102321945 }, "harness|gsm8k|5": { "acc": 0.2812736921910538, "acc_stderr": 0.012384789310940239 }, "harness|winogrande|5": { "acc": 0.8366219415943172, "acc_stderr": 0.010390695970273764 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.43965792655944824, -0.6758859157562256, 0.12209302186965942, 0.27678176760673523, -0.2664851248264313, 0.17793001234531403, -0.31815090775489807, -0.20684601366519928, 0.3883850872516632, 0.4299094080924988, -0.7331624031066895, -0.9222863912582397, -0.6443101763725281, 0.14248649775981...
null
null
null
null
null
null
null
null
null
null
null
null
null