datasetId stringlengths 2 117 | card stringlengths 19 1.01M |
|---|---|
freshpearYoon/vr_train_free_10 | ---
dataset_info:
features:
- name: audio
struct:
- name: array
sequence: float64
- name: path
dtype: string
- name: sampling_rate
dtype: int64
- name: filename
dtype: string
- name: NumOfUtterance
dtype: int64
- name: text
dtype: string
- name: samplingrate
dtype: int64
- name: begin_time
dtype: float64
- name: end_time
dtype: float64
- name: speaker_id
dtype: string
- name: directory
dtype: string
splits:
- name: train
num_bytes: 7143134749
num_examples: 10000
download_size: 1080163057
dataset_size: 7143134749
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
liuyanchen1015/MULTI_VALUE_stsb_got_gotten | ---
dataset_info:
features:
- name: sentence1
dtype: string
- name: sentence2
dtype: string
- name: score
dtype: float64
- name: idx
dtype: int64
- name: value_score
dtype: int64
splits:
- name: dev
num_bytes: 450
num_examples: 2
- name: test
num_bytes: 72
num_examples: 1
- name: train
num_bytes: 274
num_examples: 1
download_size: 9256
dataset_size: 796
---
# Dataset Card for "MULTI_VALUE_stsb_got_gotten"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
benchan79/github-issues | ---
dataset_info:
features:
- name: url
dtype: string
- name: repository_url
dtype: string
- name: labels_url
dtype: string
- name: comments_url
dtype: string
- name: events_url
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: user
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: labels
list:
- name: id
dtype: int64
- name: node_id
dtype: string
- name: url
dtype: string
- name: name
dtype: string
- name: color
dtype: string
- name: default
dtype: bool
- name: description
dtype: string
- name: state
dtype: string
- name: locked
dtype: bool
- name: assignee
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: assignees
list:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: milestone
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: labels_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: description
dtype: string
- name: creator
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: open_issues
dtype: int64
- name: closed_issues
dtype: int64
- name: state
dtype: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: due_on
dtype: 'null'
- name: closed_at
dtype: 'null'
- name: comments
sequence: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: closed_at
dtype: timestamp[s]
- name: author_association
dtype: string
- name: active_lock_reason
dtype: 'null'
- name: body
dtype: string
- name: reactions
struct:
- name: url
dtype: string
- name: total_count
dtype: int64
- name: '+1'
dtype: int64
- name: '-1'
dtype: int64
- name: laugh
dtype: int64
- name: hooray
dtype: int64
- name: confused
dtype: int64
- name: heart
dtype: int64
- name: rocket
dtype: int64
- name: eyes
dtype: int64
- name: timeline_url
dtype: string
- name: performed_via_github_app
dtype: 'null'
- name: state_reason
dtype: string
- name: draft
dtype: bool
- name: pull_request
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: diff_url
dtype: string
- name: patch_url
dtype: string
- name: merged_at
dtype: timestamp[s]
- name: is_pull_request
dtype: bool
splits:
- name: train
num_bytes: 15437002
num_examples: 3100
download_size: 4434085
dataset_size: 15437002
annotations_creators:
- no-annotation
language:
- en
language_creators:
- found
license:
- unknown
multilinguality:
- monolingual
pretty_name: Hugging Face GitHub Issues
size_categories:
- unknown
source_datasets:
- original
tags: []
task_categories:
- text-classification
- text-retrieval
task_ids:
- multi-class-classification
- multi-label-classification
- document-retrieval
---
# Dataset Card for "Hugging Face GitHub Issues
## Dataset Description
- **Point of Contact:** [Ben Chan](benchan79@gmail.com)
### Dataset Summary
GitHub Issues is a dataset consisting of GitHub issues and pull requests associated with the 🤗 Datasets [repository](https://github.com/huggingface/datasets). It is intended for educational purposes and can be used for semantic search or multilabel text classification. The contents of each GitHub issue are in English and concern the domain of datasets for NLP, computer vision, and beyond.
### Supported Tasks and Leaderboards
### Languages
English
## Dataset Structure
### Data Instances
### Data Fields
### Data Splits
## Dataset Creation
### Curation Rationale
### Source Data
#### Initial Data Collection and Normalization
#### Who are the source language producers?
### Annotations
#### Annotation process
#### Who are the annotators?
### Personal and Sensitive Information
## Considerations for Using the Data
### Social Impact of Dataset
### Discussion of Biases
### Other Known Limitations
## Additional Information
### Dataset Curators
### Licensing Information
### Citation Information
### Contributions
|
DeepPavlov/verbalist_prompts | ---
configs:
- config_name: default
data_files:
- split: dim_oasst_en
path: data/dim_oasst_en-*
- split: dim_oasst_ru
path: data/dim_oasst_ru-*
- split: dim_lima
path: data/dim_lima-*
- split: dim_logic_tasks_ru
path: data/dim_logic_tasks_ru-*
- split: dim_wikihow_en
path: data/dim_wikihow_en-*
- split: dim_wikihow_ru
path: data/dim_wikihow_ru-*
- split: dim_essayforum_writing_prompts_6k
path: data/dim_essayforum_writing_prompts_6k-*
- split: dim_sharegpt_short_ru
path: data/dim_sharegpt_short_ru-*
- split: dim_openreview_prompts_65
path: data/dim_openreview_prompts_65-*
- split: dim_roleplay_instruct_v2_final
path: data/dim_roleplay_instruct_v2_final-*
- split: dim_kinomania_scripts
path: data/dim_kinomania_scripts-*
- split: dim_bugurt_thread_prompts
path: data/dim_bugurt_thread_prompts-*
- split: dim_russian_lyrics_prompts
path: data/dim_russian_lyrics_prompts-*
- split: dim_ru_instruct_gpt4
path: data/dim_ru_instruct_gpt4-*
- split: dim_gpt_roleplay_realm
path: data/dim_gpt_roleplay_realm-*
- split: dim_ultrachat_ru
path: data/dim_ultrachat_ru-*
- split: dim_scitldr
path: data/dim_scitldr-*
- split: dim_linux_man_pages_tldr_summarized
path: data/dim_linux_man_pages_tldr_summarized-*
- split: dim_dolphin_ru_3k
path: data/dim_dolphin_ru_3k-*
- split: dim_runne_prompts
path: data/dim_runne_prompts-*
- split: dim_lurk_prompts
path: data/dim_lurk_prompts-*
- split: dim_panorama_prompts_10k
path: data/dim_panorama_prompts_10k-*
- split: dim_resh_edu_short_prompts
path: data/dim_resh_edu_short_prompts-*
- split: dim_databricks_dolly_15k_ru
path: data/dim_databricks_dolly_15k_ru-*
- split: dim_databricks_dolly_15k_en
path: data/dim_databricks_dolly_15k_en-*
- split: dim_grammarly_coedit
path: data/dim_grammarly_coedit-*
- split: dim_kinopoisk_prompts
path: data/dim_kinopoisk_prompts-*
- split: dim_medical_qa_ru_prompts
path: data/dim_medical_qa_ru_prompts-*
- split: dim_joke_explaination_prompts
path: data/dim_joke_explaination_prompts-*
- split: dim_oa_stackexchange_200k
path: data/dim_oa_stackexchange_200k-*
- split: dim_scale_helpful_no_math
path: data/dim_scale_helpful_no_math-*
- split: dim_law_stackexchange_prompts
path: data/dim_law_stackexchange_prompts-*
- split: dim_ficbook_prompts_best_10k
path: data/dim_ficbook_prompts_best_10k-*
- split: dim_azbyka_logic_ru
path: data/dim_azbyka_logic_ru-*
- split: dim_povarenok
path: data/dim_povarenok-*
- split: dim_AO3_fandom_chatbot_1to1
path: data/dim_AO3_fandom_chatbot_1to1-*
- split: dim_habr_prompts_5k
path: data/dim_habr_prompts_5k-*
- split: dim_what_where_when_50k
path: data/dim_what_where_when_50k-*
- split: dim_competition_math
path: data/dim_competition_math-*
- split: dim_sharegpt_short_en_30k
path: data/dim_sharegpt_short_en_30k-*
- split: dim_ru_turbo_alpaca_evol_instruct
path: data/dim_ru_turbo_alpaca_evol_instruct-*
- split: dim_ru_turbo_saiga
path: data/dim_ru_turbo_saiga-*
- split: dim_bugurt_completion_prompts
path: data/dim_bugurt_completion_prompts-*
- split: dim_tldr_17_50k
path: data/dim_tldr_17_50k-*
- split: dim_grade_school_math_instructions
path: data/dim_grade_school_math_instructions-*
- split: dim_tldr_news
path: data/dim_tldr_news-*
- split: dim_grade_school_math_instructions_ru
path: data/dim_grade_school_math_instructions_ru-*
- split: dim_dialogsum
path: data/dim_dialogsum-*
- split: dim_HC3_ru
path: data/dim_HC3_ru-*
- split: dim_horoscopes_ru_10k
path: data/dim_horoscopes_ru_10k-*
- split: dim_yandex_q_200k
path: data/dim_yandex_q_200k-*
- split: dim_leetcodesolutions_en_2k
path: data/dim_leetcodesolutions_en_2k-*
- split: dim_forum_uristov_rf_prompts
path: data/dim_forum_uristov_rf_prompts-*
- split: dim_dialogsum_ru
path: data/dim_dialogsum_ru-*
- split: dim_huggingartists_prompts
path: data/dim_huggingartists_prompts-*
dataset_info:
features:
- name: conversation_text
sequence: string
splits:
- name: dim_oasst_en
num_bytes: 4335500
num_examples: 2289
- name: dim_oasst_ru
num_bytes: 6206378
num_examples: 2220
- name: dim_lima
num_bytes: 2892267
num_examples: 1030
- name: dim_logic_tasks_ru
num_bytes: 76915
num_examples: 86
- name: dim_wikihow_en
num_bytes: 16008199
num_examples: 1995
- name: dim_wikihow_ru
num_bytes: 24451573
num_examples: 2058
- name: dim_essayforum_writing_prompts_6k
num_bytes: 22326330
num_examples: 6361
- name: dim_sharegpt_short_ru
num_bytes: 808319
num_examples: 253
- name: dim_openreview_prompts_65
num_bytes: 6739952
num_examples: 150
- name: dim_roleplay_instruct_v2_final
num_bytes: 4389286
num_examples: 7188
- name: dim_kinomania_scripts
num_bytes: 238731
num_examples: 27
- name: dim_bugurt_thread_prompts
num_bytes: 302191
num_examples: 223
- name: dim_russian_lyrics_prompts
num_bytes: 18676
num_examples: 43
- name: dim_ru_instruct_gpt4
num_bytes: 18351658
num_examples: 14222
- name: dim_gpt_roleplay_realm
num_bytes: 20163429
num_examples: 8700
- name: dim_ultrachat_ru
num_bytes: 4495105
num_examples: 500
- name: dim_scitldr
num_bytes: 4049209
num_examples: 3229
- name: dim_linux_man_pages_tldr_summarized
num_bytes: 3006631
num_examples: 481
- name: dim_dolphin_ru_3k
num_bytes: 7976776
num_examples: 3000
- name: dim_runne_prompts
num_bytes: 2686148
num_examples: 537
- name: dim_lurk_prompts
num_bytes: 92012533
num_examples: 5671
- name: dim_panorama_prompts_10k
num_bytes: 28964132
num_examples: 11024
- name: dim_resh_edu_short_prompts
num_bytes: 12380000
num_examples: 2106
- name: dim_databricks_dolly_15k_ru
num_bytes: 21900617
num_examples: 14914
- name: dim_databricks_dolly_15k_en
num_bytes: 11973713
num_examples: 15011
- name: dim_grammarly_coedit
num_bytes: 18500223
num_examples: 82466
- name: dim_kinopoisk_prompts
num_bytes: 136323982
num_examples: 36591
- name: dim_medical_qa_ru_prompts
num_bytes: 75634717
num_examples: 80101
- name: dim_joke_explaination_prompts
num_bytes: 196224
num_examples: 364
- name: dim_oa_stackexchange_200k
num_bytes: 192535277
num_examples: 200000
- name: dim_scale_helpful_no_math
num_bytes: 85610911
num_examples: 17095
- name: dim_law_stackexchange_prompts
num_bytes: 64544963
num_examples: 24343
- name: dim_ficbook_prompts_best_10k
num_bytes: 75867114
num_examples: 10000
- name: dim_azbyka_logic_ru
num_bytes: 173101
num_examples: 480
- name: dim_povarenok
num_bytes: 93518909
num_examples: 46500
- name: dim_AO3_fandom_chatbot_1to1
num_bytes: 1162058
num_examples: 614
- name: dim_habr_prompts_5k
num_bytes: 40224997
num_examples: 5000
- name: dim_what_where_when_50k
num_bytes: 38385243
num_examples: 50000
- name: dim_competition_math
num_bytes: 5808689
num_examples: 7500
- name: dim_sharegpt_short_en_30k
num_bytes: 86599862
num_examples: 29597
- name: dim_ru_turbo_alpaca_evol_instruct
num_bytes: 105340901
num_examples: 47793
- name: dim_ru_turbo_saiga
num_bytes: 79875722
num_examples: 37699
- name: dim_bugurt_completion_prompts
num_bytes: 5471066
num_examples: 5000
- name: dim_tldr_17_50k
num_bytes: 81185070
num_examples: 50000
- name: dim_grade_school_math_instructions
num_bytes: 4655452
num_examples: 8792
- name: dim_tldr_news
num_bytes: 4014718
num_examples: 7138
- name: dim_grade_school_math_instructions_ru
num_bytes: 6845510
num_examples: 7473
- name: dim_dialogsum
num_bytes: 11176807
num_examples: 12460
- name: dim_HC3_ru
num_bytes: 43395731
num_examples: 24322
- name: dim_horoscopes_ru_10k
num_bytes: 9489348
num_examples: 10000
- name: dim_yandex_q_200k
num_bytes: 292443135
num_examples: 200000
- name: dim_leetcodesolutions_en_2k
num_bytes: 4708692
num_examples: 2048
- name: dim_forum_uristov_rf_prompts
num_bytes: 2757263
num_examples: 1849
- name: dim_dialogsum_ru
num_bytes: 18657989
num_examples: 12460
- name: dim_huggingartists_prompts
num_bytes: 121909835
num_examples: 64006
download_size: 0
dataset_size: 2023767777
language:
- ru
- en
---
# Verbalist (буквоед) - русскоязычный ассистент.
Проект во многом вдохновленный [Saiga](https://huggingface.co/IlyaGusev/saiga2_7b_lora).
Мною были собраны все самые качественные датасеты с [huggingface.datasets](https://huggingface.co/datasets), а также собраны дополнительно с тех сайтов, которые я посчитал весьма полезными для создания аналога ChatGPT. Лицензии у всех датасетов отличаются, какие-то по типу [OpenAssistant/oasst1](https://huggingface.co/datasets/OpenAssistant/oasst1) были созданы специально для обучения подобных моделей, какие-то являются прямой выгрузкой диалогов с ChatGPT ([RyokoAI/ShareGPT52K](https://huggingface.co/datasets/RyokoAI/ShareGPT52K)).
Вклад данного репозитория состоит в систематизации и стандартизации уже имеющихся датасетов, добавлении новых. А также тренировке моделей на этих данных.
- [google sheets таблица с датасетами и описанием](https://docs.google.com/spreadsheets/d/10xcsINF_c_zUZchT8p-8xIuHDgcuwg63jjl2ortBP9I/edit?usp=sharing)
### Датасеты
- **[Объединенный датасет где все данные уже подготовлены для тренировки диалоговой модели](https://huggingface.co/datasets/dim/verbalist_prompts)**
|name |link |description |original_name |original_source |preparation_script |language|amount_examples|mean_llama_tokens|std |min_llama_tokens|25% |50% |75% |max_llama_tokens|
|-------------------------------------|---------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------|-------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------|--------|---------------|-----------------|-----------|----------------|-------|-------|-------|----------------|
|dim/oasst_en |https://huggingface.co/datasets/dim/oasst_en |OpenAssistant Conversations Dataset на английском языке, который был вручную отфильтрован мной. В исходном датасете около 30% диалогов оказались не корректными. Иногда пользователь, играющий роль ассистента, использовал грубый тон в общении с пользователем, иногда люди просто отвечали "не знаю" на вопросы, и некоторые из вопросов были недостаточно научными или слишком краткими. Вы можете ознакомиться с этой разметкой по следующей ссылке: https://docs.google.com/spreadsheets/d/117t5-Tr-dxdODpyFBkBg5R8GklYBlsvBfeDyjqwz2pA/edit?usp=sharing|2023-04-12_oasst_ready.messages.jsonl.gz |https://huggingface.co/datasets/OpenAssistant/oasst1/blob/main/2023-04-12_oasst_ready.messages.jsonl.gz|https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/oasst |en |2289 |468.6788991 |295.0864391|17 |264 |410 |618 |2332 |
|dim/oasst_ru |https://huggingface.co/datasets/dim/oasst_ru |OpenAssistant Conversations Dataset на русском языке, который был вручную отфильтрован мной. В исходном датасете около 30% диалогов оказались не корректными. Иногда пользователь, играющий роль ассистента, использовал грубый тон в общении с пользователем, иногда люди просто отвечали "не знаю" на вопросы, и некоторые из вопросов были недостаточно научными или слишком краткими. Вы можете ознакомиться с этой разметкой по следующей ссылке: https://docs.google.com/spreadsheets/d/1uiOnqxiytuxrB6u6q2pMSdnMfqjT3arfg8DlT-OWlb0/edit?usp=sharing |2023-04-12_oasst_ready.messages.jsonl.gz |https://huggingface.co/datasets/OpenAssistant/oasst1/blob/main/2023-04-12_oasst_ready.messages.jsonl.gz|https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/oasst |ru |2220 |589.6112613 |479.835392 |7 |278 |465 |763.5 |5028 |
|dim/lima |https://huggingface.co/datasets/dim/lima |Данный датасет включает в себя 1000 высококачественных обучающих примеров на английском языке. Он собран из различных источников, включая Stack Exchange (STEM), Stack Exchange (Other), wikiHow, Pushshift r/WritingPrompts, Natural Instructions, а также уникальные инструкции, созданные авторами статей. Более подробную информацию о датасете можно найти в [соответствующей статье](https://arxiv.org/pdf/2305.11206.pdf). |GAIR/lima |https://huggingface.co/datasets/GAIR/lima |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/lima |en |1030 |712.9456311 |671.179319 |29 |312.75 |488.5 |825 |3920 |
|dim/logic_tasks_ru |https://huggingface.co/datasets/dim/logic_tasks_ru |Данный набор задач по логике для детей взят с веб-сайта https://www.potehechas.ru/zadachi/zadachi.shtml. |Логические задачи - Логика и нестандартное мышление |https://www.potehechas.ru/zadachi/zadachi.shtml |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/logic_tasks_ru |ru |86 |193.0697674 |76.69048422|58 |133.75 |185 |243.5 |432 |
|dim/wikihow_en |https://huggingface.co/datasets/dim/wikihow_en |Данный датасет содержит англоязычные статьи, извлеченные с веб-сайта Wikihow. |0x22almostEvil/multilingual-wikihow-qa-16k |https://huggingface.co/datasets/0x22almostEvil/multilingual-wikihow-qa-16k |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/wiki_how |en |1995 |2037.86416 |870.1910713|265 |1463 |1913 |2461.5 |8988 |
|dim/wikihow_ru |https://huggingface.co/datasets/dim/wikihow_ru |Данный датасет включает в себя русскоязычные статьи, полученные с веб-сайта Wikihow. |0x22almostEvil/multilingual-wikihow-qa-16k |https://huggingface.co/datasets/0x22almostEvil/multilingual-wikihow-qa-16k |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/wiki_how |ru |2058 |2498.119534 |1587.851549|139 |1236.25|2264 |3421.75|10217 |
|dim/essayforum_writing_prompts_6k |https://huggingface.co/datasets/dim/essayforum_writing_prompts_6k |Данный датасет включает в себя запросы на помощь с написанием небольших эссе, размещенные на данном сайте. Ответы в датасете предоставлены исключительно главным администратором сайта. Его ответы были отобраны, поскольку чаще всего они являются наиболее качественными и вдумчивыми. |EssayForum |https://essayforum.com/writing/ |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/essayforum |en |6361 |783.1760729 |285.4314176|258 |629 |742 |879 |4966 |
|dim/sharegpt_short_ru |https://huggingface.co/datasets/dim/sharegpt_short_ru |Очищенная версия русская версия sharegpt. Я попытался вырезать из текста все промпты, где модель извиняется что что-то не может сделать, что она не имеет доступа в интернет. Диалоги, которые противоречат морали модели я просто исключил. Постарался убрать упоминания о том что она модель AI, так как за ролеплейные характеристики отвечают другие датасеты. |RyokoAI/ShareGPT52K |https://huggingface.co/datasets/RyokoAI/ShareGPT52K |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/sharegpt |ru |253 |706.6521739 |494.7437584|13 |310 |628 |1078 |1861 |
|dim/openreview_prompts_65 |https://huggingface.co/datasets/dim/openreview_prompts_65 |Датасет рецензий на реальные научные статьи с сайта openreview. Вышло на самом деле не так много, так как многие статьи не выложенны на arxiv или просто не имеют рецензий. Плюс я собрал только малую часть данного сайта, а не все что там было. |https://openreview.net/ |https://openreview.net/ |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/openreview |en |150 |13531.51333 |6966.623686|4893 |8279 |12648.5|15833.5|41494 |
|dim/roleplay_instruct_v2_final |https://huggingface.co/datasets/dim/roleplay_instruct_v2_final |Датасет ролеплея от GPT-4 на различных персонажей на английском языке. |roleplay-instruct-v2-final |https://github.com/teknium1/GPTeacher |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/gpt_roleplay_realm |en |7188 |155.1413467 |97.71215667|14 |88 |125 |192 |1291 |
|dim/kinomania_scripts |https://huggingface.co/datasets/dim/kinomania_scripts |Небольшой датасет, который содержит в себе сценарии фильмов целиком и их краткое содержание |https://www.kinomania.ru/scripts |https://www.kinomania.ru/scripts |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/kinomania_scripts |ru\en |27 |2603.407407 |510.375447 |1887 |2175 |2370 |3069 |3616 |
|dim/bugurt_thread_prompts |https://huggingface.co/datasets/dim/bugurt_thread_prompts |Небольшой набор размеченных бугуртов вместе с моим другом, для того чтобы модель научилась писать бугурты на конкретную ситуацию. Собраны из телеграм паблика БУГУРТ ТРЕД(https://t.me/bugurtthread) |https://t.me/bugurtthread |https://t.me/bugurtthread |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/bugurt_thread |ru |223 |334.4529148 |271.2557988|48 |148.5 |254 |434.5 |1645 |
|dim/russian_lyrics_prompts |https://huggingface.co/datasets/dim/russian_lyrics_prompts |Небольшой датасет промптов собранный мною из различных учебников по стихосложению, чтобы модель научилась писать стихи, используя необходимый литературный прием на конкретную тему. |Учебник стихосложения |https://stihi.ru/uchebnik/ |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/russian_lyrics_prompts |ru |43 |106.1395349 |71.00220701|45 |71 |83 |96.5 |411 |
|dim/ru_instruct_gpt4 |https://huggingface.co/datasets/dim/ru_instruct_gpt4 |Датасет каких-то инструкций на русском сгенерированных GPT-4 |lksy/ru_instruct_gpt4 |https://huggingface.co/datasets/lksy/ru_instruct_gpt4 |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/ru_instruct_gpt4 |ru |14222 |259.2173393 |237.9433891|16 |109 |175 |271 |1374 |
|dim/gpt_roleplay_realm |https://huggingface.co/datasets/dim/gpt_roleplay_realm |Диалоги выдуманных персонажей при помощи GPT-4, диалоги были сгенерированны при помощи GPT-3.5. Русский и английский. |IlyaGusev/gpt_roleplay_realm |https://huggingface.co/datasets/IlyaGusev/gpt_roleplay_realm |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/gpt_roleplay_realm |ru\en |8700 |504.2424138 |117.6228987|180 |424 |489 |569 |1207 |
|dim/ultrachat_ru |https://huggingface.co/datasets/dim/ultrachat_ru |Какой-то рандомный датасет диалогов от chatgpt, который я нашел на huggingface. Из текста диалогов были вырезаны шаблонные фразы по типу: "я не могу выполнить", "как языковая модель" и тд. Потому что обычно после этого следовало вменяемое решение задачи. |kaleinaNyan/UltraChat_ru |https://huggingface.co/datasets/kaleinaNyan/UltraChat_ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/ultrachat_ru |ru |500 |1781.782 |901.1212735|267 |1113.25|1648 |2250.25|7303 |
|dim/scitldr |https://huggingface.co/datasets/dim/scitldr |Саммаризация научных статей на английском языке, выполненная экспертами. |allenai/scitldr |https://huggingface.co/datasets/allenai/scitldr |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/scitldr |en |3229 |258.748529 |71.41209752|60 |209 |252 |303 |689 |
|dim/linux_man_pages_tldr_summarized |https://huggingface.co/datasets/dim/linux_man_pages_tldr_summarized |Саммаризация мануалов для инструментов линукс в удобный набор команд с их кратким описанием. |tmskss/linux-man-pages-tldr-summarized |https://huggingface.co/datasets/tmskss/linux-man-pages-tldr-summarized |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/linux-man-pages-tldr-summarized |en |481 |1567.727651 |3590.30871 |96 |405 |765 |1386 |49888 |
|dim/dolphin_ru_3k |https://huggingface.co/datasets/dim/dolphin_ru_3k |Подвыборка размера 3000 переведенных заданий dolphin. Примеры из оригинального датасета это промпты из FLANv2 и решения при помощи GPT-4 или GPT-3.5. |d0rj/dolphin-ru |https://huggingface.co/datasets/d0rj/dolphin-ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/dolphin_ru |ru |3000 |556.1133333 |650.0962612|19 |207 |369.5 |720.25 |6787 |
|dim/runne_prompts |https://huggingface.co/datasets/dim/runne_prompts |Промпты составленные из датасета RuNNE. Лично я при обучении сотавил промпт следующим образом. Сначала идет текст "Найди все именованные сущности в данном тексте:", а затем шел сам текст. В качестве выхода модели нужно сгенерировать JSON где содержатся все найденные именованные сущности. К примеру так [{"name": "PERSON", "ent": "Ким Чен Нама", "pos": "0 12"}, {"name": "ORGANIZATION", "ent": "Полиция Малайзии", "pos": "56 72"}] |iluvvatar/RuNNE |https://huggingface.co/datasets/iluvvatar/RuNNE |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/RuNNE |ru |537 |1479.750466 |230.0259174|581 |1337 |1480 |1635 |1988 |
|dim/lurk_prompts |https://huggingface.co/datasets/dim/lurk_prompts |Набор определений различных терминов с сайта lurk. Сами промпты были составлены автоматически следующим образом. напиши определение для (ОПРЕДЕЛЕНИЕ) в стиле lurk |averoo/lurk |https://huggingface.co/datasets/averoo/lurk/viewer/default/train?p=2 |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/lurk |ru |5671 |3450.34262 |4147.897824|35 |710.5 |2010 |4593 |55098 |
|dim/panorama_prompts_10k |https://huggingface.co/datasets/dim/panorama_prompts_10k |Набор юмористических заголовков и текстов новостей с сайта панорама. |its5Q/panorama |https://huggingface.co/datasets/its5Q/panorama |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/panorama |ru |11024 |516.9588171 |191.3774023|36 |422 |498 |585 |3496 |
|dim/resh_edu_short_prompts |https://huggingface.co/datasets/dim/resh_edu_short_prompts |Набор уроков с сайта resh.edu.ru включающих в себя название урока, тему, класс и текст урока с заданиями. |its5Q/resh-edu |https://huggingface.co/datasets/its5Q/resh-edu |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/resh_edu |ru |2106 |1431.510921 |435.7847102|56 |1175.5 |1517 |1777 |2029 |
|dim/databricks_dolly_15k_ru |https://huggingface.co/datasets/dim/databricks_dolly_15k_ru |Переведенный датасет dolly на русский язык. Включает в себя набор инструкций на обширное количество тематик. |dwarf2/databricks-dolly-15k-ru |https://huggingface.co/dwarf2/databricks-dolly-15k-ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/databricks_dolly_15k_ru |ru |14914 |305.4638595 |405.874049 |8 |87 |182 |370 |9268 |
|dim/databricks_dolly_15k_en |https://huggingface.co/datasets/dim/databricks_dolly_15k_en |databricks-dolly-15k — это набор данных с открытым исходным кодом, содержащий записи о выполнении инструкций, созданные тысячами сотрудников Databricks в нескольких поведенческих категориях, изложенных в документе InstructGPT, включая мозговой штурм, классификацию, закрытый контроль качества, генерацию, извлечение информации, открытый контроль качества и обобщение. |databricks/databricks-dolly-15k |https://huggingface.co/datasets/databricks/databricks-dolly-15k |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/databricks_dolly_15k_en |en |15011 |204.7264006 |302.5539423|6 |57 |119 |242 |8883 |
|dim/grammarly_coedit |https://huggingface.co/datasets/dim/grammarly_coedit |Набор промптов, которые просят исправить грамматические, стилистические ошибки на английском. |grammarly/coedit |https://huggingface.co/datasets/grammarly/coedit |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/grammarly_coedit |en |82466 |53.7128271 |26.73822864|10 |35 |46 |64 |694 |
|dim/kinopoisk_prompts |https://huggingface.co/datasets/dim/kinopoisk_prompts |Отзывы с кинопоиска на топ 250 фильмов. В промптах я прошу написать хороший, плохой или нейтральный отзыв на определенный фильм. |blinoff/kinopoisk |https://huggingface.co/datasets/blinoff/kinopoisk |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/kinopoisk |ru |36591 |875.0955973 |565.3212035|48 |484 |733 |1117 |8628 |
|dim/medical_qa_ru_prompts |https://huggingface.co/datasets/dim/medical_qa_ru_prompts |Какие-то вопросы и ответы с какого-то медицинского форума. В данной версии датасета только первый ответ из оригинала. |blinoff/medical_qa_ru_data |https://huggingface.co/datasets/blinoff/medical_qa_ru_data |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/medical_qa_ru_data |ru |80101 |206.710528 |175.4343973|12 |106 |161 |247 |5062 |
|dim/joke_explaination_prompts |https://huggingface.co/datasets/dim/joke_explaination_prompts |Объяснение шуток на английском. От изначального датасета отличается тем, что я убрал последнее предложение из объяснения, так как оно ссылается на видео на сайте. |theblackcat102/joke_explaination |https://huggingface.co/datasets/theblackcat102/joke_explaination |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/joke_explaination |en |364 |143.5741758 |68.90275411|21 |99 |137.5 |189.25 |334 |
|dim/oa_stackexchange_200k |https://huggingface.co/datasets/dim/oa_stackexchange_200k |Вопросы-ответы со stackexchange. Оригинальный датасет был составлен следующим образом: были выбраны только темы с принятым ответом, для которых длина вопроса и ответа составляет менее 1000 символов. Другие ответы, вопросы без принятых ответов или длинные записи были удалены. Так как оригинальный датасет слишком большой, я рандомно выбрал 200k семплов. |donfu/oa-stackexchange |https://huggingface.co/datasets/donfu/oa-stackexchange |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/oa_stackexchange |en |200000 |276.29862 |112.5004436|22 |194 |265 |345 |1226 |
|dim/scale_helpful_no_math |https://huggingface.co/datasets/dim/scale_helpful_no_math |Какой-то набор диалогов с вопросами-ответами на английском, происхождение неизвестно. |HuggingFaceH4/scale_helpful_no_math |https://huggingface.co/datasets/HuggingFaceH4/scale_helpful_no_math/viewer/default/train_rm |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/scale_helpful_no_math |en |17095 |1235.302603 |838.1097885|53 |663 |1063 |1617 |34480 |
|dim/law_stackexchange_prompts |https://huggingface.co/datasets/dim/law_stackexchange_prompts |Вопросы про закон на английском языке со StackExchange. Оригинальный датасет был преобразован в markdown. |ymoslem/Law-StackExchange |https://huggingface.co/datasets/ymoslem/Law-StackExchange |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/law_stackexchange |en |24343 |689.1184324 |565.0316906|43 |354 |540 |836 |8969 |
|dim/ficbook_prompts_best_10k |https://huggingface.co/datasets/dim/ficbook_prompts_best_10k |Топ 10k лучших фанфиков с сайта ficbook.net. Все промпты выглядят следующим образом: напиши фанфик с названием {title} и следующим описанием {description}, с тегами {tags}, Где title это оригинальное название, description оригинальное описание, tags это теги данного произведения. |AlexWortega/FicBook |https://huggingface.co/datasets/AlexWortega/FicBook |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/ficbook |ru |10000 |1737.8214 |402.0748161|166 |1716 |1950 |1950 |1952 |
|dim/azbyka_logic_ru |https://huggingface.co/datasets/dim/azbyka_logic_ru |Небольшой набор детских логических и православных задач, взятых с сайта https://azbyka.ru/deti/logicheskie-i-zanimatelnye-zadachi . Обычно у них почти нет развернутого решения, только ответ. Я пытался расписать решение некоторых задач, но меня хватило только на 35, если кто-то займется подобным буду рад https://docs.google.com/spreadsheets/d/1JRbtppbZCUbV_Eqd0nKbRDQEuPnJIAgJ70cUILEDUI4/edit?usp=sharing . |Логические и занимательные задачи (300 задач) |https://azbyka.ru/deti/logicheskie-i-zanimatelnye-zadachi |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/azbyka_logic_ru |ru |480 |77.4375 |77.56990416|14 |31 |50 |91 |652 |
|dim/povarenok                             |https://huggingface.co/datasets/dim/povarenok                             |46k лучших рецептов с сайта povarenok.ru, содержит текст рецепта, список ингредиентов, название блюда                                                                                                                                                                                                                                                                                                                       |https://www.povarenok.ru/recipes/                             |https://www.povarenok.ru/recipes/                                                     |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/povarenok                      |ru      |46500          |488.9118495        |344.8563249|31 |281    |440   |632    |5542   |
|dim/AO3_fandom_chatbot_1to1 |https://huggingface.co/datasets/dim/AO3_fandom_chatbot_1to1 |Какой-то набор ролеплейных диалогов с описанием персонажей и их отыгрышем. Происхождение неизвестно. |ebony59/AO3_fandom_chatbot_1to1 |https://huggingface.co/datasets/ebony59/AO3_fandom_chatbot_1to1 |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/AO3_fandom_chatbot_1to1 |en |614 |493.7166124 |226.3885365|129 |328.25 |432.5 |611.75 |1272 |
|dim/habr_prompts_5k |https://huggingface.co/datasets/dim/habr_prompts_5k |Статьи с хабра. Датасет был составлен с помощью chatgpt, chatgpt преобразовывал заголовки таким образом чтобы они звучали как вопросы от пользователя, в качестве таргета выступала сама статья. |IlyaGusev/habr |https://huggingface.co/datasets/IlyaGusev/habr |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/habr |ru |5000 |1732.892 |454.8418369|19 |1920.75|1950 |1951 |1952 |
|dim/what_where_when_50k |https://huggingface.co/datasets/dim/what_where_when_50k |50k вопросов с решениями с сайта что где когда. В качестве промпта выступает вопрос, в качестве ответа конкатенация объяснения и краткого ответа. Все вопросы-ответы вы можете найти по этой ссылке https://huggingface.co/datasets/dim/what_where_when_ru |https://db.chgk.info |https://db.chgk.info |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/what_where_when |ru |50000 |169.1862 |68.91119898|18 |122 |158 |202 |1167 |
|dim/competition_math |https://huggingface.co/datasets/dim/competition_math |Датасет олимпиадной математики на английском. The Mathematics Aptitude Test of Heuristics (MATH) dataset. |competition_math |https://huggingface.co/datasets/competition_math |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/competition_math |en |7500 |317.5254667 |267.8583731|34 |147 |234 |393 |3029 |
|dim/sharegpt_short_en_30k |https://huggingface.co/datasets/dim/sharegpt_short_en_30k |Короткие диалоги на английском из sharegpt |RyokoAI/ShareGPT52K |https://huggingface.co/datasets/RyokoAI/ShareGPT52K |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/sharegpt |en |29597 |749.3149981 |516.3702473|3 |336 |630 |1095 |2021 |
|dim/ru_turbo_alpaca_evol_instruct |https://huggingface.co/datasets/dim/ru_turbo_alpaca_evol_instruct |Набор инструкций различной тематики на русском языке, сгенерированных при помощи chatgpt. |IlyaGusev/ru_turbo_alpaca_evol_instruct |https://huggingface.co/datasets/IlyaGusev/ru_turbo_alpaca_evol_instruct |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/ru_turbo_alpaca_evol_instruct |ru |47793 |453.0887996 |289.5498356|17 |221 |430 |623 |4647 |
|dim/ru_turbo_saiga |https://huggingface.co/datasets/dim/ru_turbo_saiga |Набор инструкций различной тематики на русском языке, сгенерированных при помощи chatgpt. |IlyaGusev/ru_turbo_saiga |https://huggingface.co/datasets/IlyaGusev/ru_turbo_saiga |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/ru_turbo_saiga |ru |37699 |412.7508687 |113.346917 |87 |339 |398 |466 |1427 |
|dim/bugurt_completion_prompts |https://huggingface.co/datasets/dim/bugurt_completion_prompts |Обрезанные бугурты, где в качестве промпта используется строка вида - продолжи бугурт: первая строчка бугурта |https://t.me/bugurtthread |https://t.me/bugurtthread |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/bugurt_thread |ru |5000 |280.2466 |320.4353681|32 |111 |178 |331 |11333 |
|dim/tldr_17_50k |https://huggingface.co/datasets/dim/tldr_17_50k |Очень вольная абстрактная саммаризация постов с реддита в одну строчку |webis/tldr-17 |https://huggingface.co/datasets/webis/tldr-17 |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/tldr_17 |en |50000 |421.12752 |403.346214 |10 |177 |303 |525 |9592 |
|dim/grade_school_math_instructions |https://huggingface.co/datasets/dim/grade_school_math_instructions |OpenAI's grade-school-math датасет преобразованный в промпты. |qwedsacf/grade-school-math-instructions |https://huggingface.co/datasets/qwedsacf/grade-school-math-instructions |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/grade-school-math-instructions |en |8792 |171.6310282 |63.09232668|50 |124 |161 |206 |511 |
|dim/tldr_news |https://huggingface.co/datasets/dim/tldr_news |Хедлайны и текст новостей на различную тематику. |JulesBelveze/tldr_news |https://huggingface.co/datasets/JulesBelveze/tldr_news |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/tldr_news |en |7138 |133.1004483 |46.48736493|23 |100 |133 |161 |476 |
|dim/grade_school_math_instructions_ru|https://huggingface.co/datasets/dim/grade_school_math_instructions_ru|OpenAI's grade-school-math датасет переведенный на русский. |d0rj/gsm8k-ru |https://huggingface.co/datasets/d0rj/gsm8k-ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/grade_school_math_instructions_ru|ru |7473 |259.8321959 |100.1229127|78 |185 |241 |314 |838 |
|dim/dialogsum |https://huggingface.co/datasets/dim/dialogsum |Саммаризация диалогов на английском языке, разметка выполнялась вручную. |knkarthick/dialogsum |https://huggingface.co/datasets/knkarthick/dialogsum |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/dialogsum |en |12460 |269.6467095 |126.285664 |75 |191 |245 |327 |1725 |
|dim/HC3_ru |https://huggingface.co/datasets/dim/HC3_ru |Вопросы-ответы с реддита, есть ответы сгенерированные chatgpt и реальные ответы пользователей. Я использовал только реальные ответы пользователей. |d0rj/HC3-ru |https://huggingface.co/datasets/d0rj/HC3-ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/HC3_ru |ru |24322 |360.5608503 |330.2285903|15 |168 |267 |435 |10025 |
|dim/horoscopes_ru_10k                     |https://huggingface.co/datasets/dim/horoscopes_ru_10k                     |10k гороскопов, с промптами, где я прошу сгенерировать гороскоп для определенного знака зодиака                                                                                                                                                                                                                                                                                                                             |dkagramanyan/horoscopes_ru                                    |https://huggingface.co/datasets/dkagramanyan/horoscopes_ru                            |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/horoscopes_ru                  |ru      |10000          |183.1443           |31.62023184|55 |159    |187   |201    |464    |
|dim/yandex_q_200k |https://huggingface.co/datasets/dim/yandex_q_200k |200k рандомно выбранных вопросов-ответов с сайта yandex q. |its5Q/yandex-q |https://huggingface.co/datasets/its5Q/yandex-q |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/yandex_q |ru |200000 |304.569005 |340.7808288|18 |127 |202 |353 |19294 |
|dim/leetcodesolutions_en_2k |https://huggingface.co/datasets/dim/leetcodesolutions_en_2k |Решения задач с leetcode на разных языках. |TigerResearch/tigerbot-kaggle-leetcodesolutions-en-2k |https://huggingface.co/datasets/TigerResearch/tigerbot-kaggle-leetcodesolutions-en-2k |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/leetcodesolutions_en_2k |en |2048 |740.7441406 |253.2493282|297 |565 |685 |857 |1960 |
|dim/forum_uristov_rf_prompts |https://huggingface.co/datasets/dim/forum_uristov_rf_prompts |Вопросы-ответы с российского юридического форума. |https://xn----dtbrojdkckkfj9k.xn--p1ai/vopros-yuristu?page=560|https://xn----dtbrojdkckkfj9k.xn--p1ai/vopros-yuristu?page=560 |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/forum_uristov_rf |ru |1849 |321.0540833 |429.58896 |31 |134 |210 |349 |6470 |
|dim/dialogsum_ru |https://huggingface.co/datasets/dim/dialogsum_ru |Саммаризация диалогов на русском языке, перевод dialogsum. |d0rj/dialogsum-ru |https://huggingface.co/datasets/d0rj/dialogsum-ru |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/dialogsum-ru |ru |12460 |364.2813804 |178.7117754|98 |250 |329 |446 |2300 |
|dim/huggingartists_prompts |https://huggingface.co/datasets/dim/huggingartists_prompts |Промпты, которые просят продолжить песню в стиле определенного исполнителя. В данном наборе содержатся почти все исполнители, которых вы можете найти в этой организации https://huggingface.co/huggingartists |https://huggingface.co/huggingartists |https://huggingface.co/huggingartists |https://github.com/dmitrymailk/verbalist/tree/master/verbalist/datasets/huggingartists |ru |64006 |561.6732025 |586.18458 |28 |297 |453 |720 |32949 |
### Модели
- [Ссылка на google sheets](https://docs.google.com/spreadsheets/d/1LGCy8RBR_Yk9wHRcp0IDV8eut4D7tAQBRgqCLqxYk3E/edit?usp=sharing)
|model_name |ru_cola(zero shot, prompt)|russian super glue(zero shot)|mmlu_ru(NLPCoreTeam)|mt_bench_ru_turn_1|mt_bench_ru_turn_2|mt_bench_ru_avg|mt_bench_generation|average |
|-----------------------------------------|--------------------------|-----------------------------|--------------------|------------------|------------------|---------------|-------------------|------------|
|IlyaGusev/saiga_mistral_7b_lora |0.710082526 |0.64 |0.4848747094 |0.6567901 |0.53375 |0.59527005 | |0.6050994671|
|verbalist_7b_v9_800 |0.6709723717 |0.665 |0.4801731633 |0.6175 |0.4375 |0.4375 | |0.574229107 |
|Open-Orca/Mistral-7B-OpenOrca |0.6917832795 |0.652 |0.4592925739 |0.685 |0.595 |0.64 | |0.6166151707|
|mistral-open-orca-ru-4600-step |0.6928597058 |0.663 |0.4347 |0.71625 |0.546 |0.631125 | |0.6105619412|
|verbalist_v10_1650 |0.7201291712 |0.66 |0.4920804261 |0.56125 |0.5 |0.530625 | |0.5866919194|
|gpt-3.5-turbo |0.72 |0.682 | |0.87 |0.745 |0.8075 | |0.75425 |
|openchat/openchat_3.5 |0.6727664155 |0.642 | | | |#DIV/0! | |0.6573832078|
|dim/tiny-llama-2T-open-orca-ru-10000-step|0.6361679225 |0.451 |0.2999564271 | | | | |0.4623747832|
- [dim/mistral-open-orca-ru-4600-step](https://huggingface.co/dim/mistral-open-orca-ru-4600-step)
- [dim/verbalist_7b_v9_800](https://huggingface.co/dim/verbalist_7b_v9_800)
- [dim/verbalist_v10_1650](https://huggingface.co/dim/verbalist_v10_1650)
### Код обучения
- [общий алгоритм обучения](https://github.com/dmitrymailk/verbalist/blob/master/verbalist/model/src/train.py)
- [формирование датасетов для обучения](https://github.com/dmitrymailk/verbalist/blob/master/verbalist/model/src/dataset.py#L176)
### Оборудование
Все обучение и инференс производится на видеокарте A100, на других видеокартах была обнаружена существенная деградация качества при инференсе, данный аспект требует дополнительного изучения.
- NVIDIA A100-SXM4-40GB
- NVIDIA-SMI 535.54.03
- Driver Version: 535.54.03
- CUDA Version: 12.2
- torch==2.0.1+cu118
### Дальнейшее развитие
Самое простое, что можно сделать, — это переводить уже имеющиеся хорошие датасеты с английского на русский при помощи GPT-4.
Более сложное — это собирать больше разнообразных данных из различных доменов. Я могу лишь подкинуть идеи того, какие датасеты можно собрать еще.
- решебники по литературе, русскому и другим предметам
- задания со всяких бирж труда
- [краткие пересказы произведений, анализ произведений, сочинения по ним](http://www.litra.ru/shortwork/)
- [туториалы с digital ocean (более 7000)](https://www.digitalocean.com/community/tutorials)
- [туториалы с selectel](https://selectel.ru/blog/tutorials/)
- больше форумов на различные тематики
- [бесплатные эссе с ivypanda essays](https://ivypanda.com/essays/) и дальнейший их перевод на русский
- больше стихов и песен
- [олимпиадные русские задачи](https://math.ru/problems/) их очень сложно собирать, так как большинство из них живут только в PDF или docx. Но их довольно много и они довольно сильно отличаются от олимпиадной математики на английском. Но у меня нет времени этим заниматься.
- фанфики на иностранном языке
- исправить текущие автоматические промпты на более разнообразные, при помощи chatgpt |
open-llm-leaderboard/details_ConvexAI__Harmony-4x7B-bf16 | ---
pretty_name: Evaluation run of ConvexAI/Harmony-4x7B-bf16
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ConvexAI/Harmony-4x7B-bf16](https://huggingface.co/ConvexAI/Harmony-4x7B-bf16)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
  \nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
  \nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ConvexAI__Harmony-4x7B-bf16\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-02T02:08:05.844408](https://huggingface.co/datasets/open-llm-leaderboard/details_ConvexAI__Harmony-4x7B-bf16/blob/main/results_2024-02-02T02-08-05.844408.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6519815241664966,\n\
\ \"acc_stderr\": 0.03207888973753625,\n \"acc_norm\": 0.6516491536204881,\n\
\ \"acc_norm_stderr\": 0.03274551770079104,\n \"mc1\": 0.4602203182374541,\n\
\ \"mc1_stderr\": 0.01744801722396088,\n \"mc2\": 0.6205565135867297,\n\
\ \"mc2_stderr\": 0.015134580676846265\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.658703071672355,\n \"acc_stderr\": 0.013855831287497724,\n\
\ \"acc_norm\": 0.6834470989761092,\n \"acc_norm_stderr\": 0.013592431519068077\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6810396335391357,\n\
\ \"acc_stderr\": 0.004651211311633843,\n \"acc_norm\": 0.8674566819358693,\n\
\ \"acc_norm_stderr\": 0.0033838751726700243\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n\
\ \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n\
\ \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.037150621549989056,\n\
\ \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.037150621549989056\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n\
\ \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \
\ \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n\
\ \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n\
\ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n\
\ \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.6763005780346821,\n\
\ \"acc_norm_stderr\": 0.0356760379963917\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n\
\ \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n\
\ \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n\
\ \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"\
acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n\
\ \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n\
\ \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n\
\ \"acc_stderr\": 0.02390491431178265,\n \"acc_norm\": 0.7709677419354839,\n\
\ \"acc_norm_stderr\": 0.02390491431178265\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175008,\n\
\ \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\"\
: 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.031922715695483,\n\
\ \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.031922715695483\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586815,\n \"\
acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586815\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n\
\ \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n\
\ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.37777777777777777,\n \"acc_stderr\": 0.02956070739246572,\n \
\ \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.02956070739246572\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.02983796238829193,\n \
\ \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.02983796238829193\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\
acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"\
acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538272,\n \"\
acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538272\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8186274509803921,\n \"acc_stderr\": 0.027044621719474086,\n \"\
acc_norm\": 0.8186274509803921,\n \"acc_norm_stderr\": 0.027044621719474086\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8016877637130801,\n \"acc_stderr\": 0.02595502084162113,\n \
\ \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.02595502084162113\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\
\ \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n\
\ \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.034465133507525995,\n\
\ \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.034465133507525995\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n\
\ \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n\
\ \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n\
\ \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\
\ \"acc_stderr\": 0.047184714852195886,\n \"acc_norm\": 0.44642857142857145,\n\
\ \"acc_norm_stderr\": 0.047184714852195886\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384495,\n\
\ \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384495\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n\
\ \"acc_stderr\": 0.021586494001281376,\n \"acc_norm\": 0.8760683760683761,\n\
\ \"acc_norm_stderr\": 0.021586494001281376\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n\
\ \"acc_stderr\": 0.013428186370608304,\n \"acc_norm\": 0.8301404853128991,\n\
\ \"acc_norm_stderr\": 0.013428186370608304\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7341040462427746,\n \"acc_stderr\": 0.02378620325550829,\n\
\ \"acc_norm\": 0.7341040462427746,\n \"acc_norm_stderr\": 0.02378620325550829\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4011173184357542,\n\
\ \"acc_stderr\": 0.01639222189940708,\n \"acc_norm\": 0.4011173184357542,\n\
\ \"acc_norm_stderr\": 0.01639222189940708\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292452,\n\
\ \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292452\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n\
\ \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n\
\ \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7530864197530864,\n \"acc_stderr\": 0.02399350170904211,\n\
\ \"acc_norm\": 0.7530864197530864,\n \"acc_norm_stderr\": 0.02399350170904211\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \
\ \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n\
\ \"acc_stderr\": 0.01274520462608314,\n \"acc_norm\": 0.46870925684485004,\n\
\ \"acc_norm_stderr\": 0.01274520462608314\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.0279715413701706,\n\
\ \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.0279715413701706\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6830065359477124,\n \"acc_stderr\": 0.018824219512706207,\n \
\ \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.018824219512706207\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n\
\ \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\
\ \"acc_stderr\": 0.02553843336857833,\n \"acc_norm\": 0.845771144278607,\n\
\ \"acc_norm_stderr\": 0.02553843336857833\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \
\ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\
\ \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n\
\ \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n\
\ \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4602203182374541,\n\
\ \"mc1_stderr\": 0.01744801722396088,\n \"mc2\": 0.6205565135867297,\n\
\ \"mc2_stderr\": 0.015134580676846265\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.01094187795567621\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7210007581501138,\n \
\ \"acc_stderr\": 0.012354115779970308\n }\n}\n```"
repo_url: https://huggingface.co/ConvexAI/Harmony-4x7B-bf16
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|arc:challenge|25_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|gsm8k|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hellaswag|10_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-08-05.844408.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-02T02-08-05.844408.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- '**/details_harness|winogrande|5_2024-02-02T02-08-05.844408.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-02T02-08-05.844408.parquet'
- config_name: results
data_files:
- split: 2024_02_02T02_08_05.844408
path:
- results_2024-02-02T02-08-05.844408.parquet
- split: latest
path:
- results_2024-02-02T02-08-05.844408.parquet
---
# Dataset Card for Evaluation run of ConvexAI/Harmony-4x7B-bf16
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [ConvexAI/Harmony-4x7B-bf16](https://huggingface.co/ConvexAI/Harmony-4x7B-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ConvexAI__Harmony-4x7B-bf16",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-02-02T02:08:05.844408](https://huggingface.co/datasets/open-llm-leaderboard/details_ConvexAI__Harmony-4x7B-bf16/blob/main/results_2024-02-02T02-08-05.844408.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6519815241664966,
"acc_stderr": 0.03207888973753625,
"acc_norm": 0.6516491536204881,
"acc_norm_stderr": 0.03274551770079104,
"mc1": 0.4602203182374541,
"mc1_stderr": 0.01744801722396088,
"mc2": 0.6205565135867297,
"mc2_stderr": 0.015134580676846265
},
"harness|arc:challenge|25": {
"acc": 0.658703071672355,
"acc_stderr": 0.013855831287497724,
"acc_norm": 0.6834470989761092,
"acc_norm_stderr": 0.013592431519068077
},
"harness|hellaswag|10": {
"acc": 0.6810396335391357,
"acc_stderr": 0.004651211311633843,
"acc_norm": 0.8674566819358693,
"acc_norm_stderr": 0.0033838751726700243
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6370370370370371,
"acc_stderr": 0.04153948404742398,
"acc_norm": 0.6370370370370371,
"acc_norm_stderr": 0.04153948404742398
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.037150621549989056,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.037150621549989056
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.62,
"acc_stderr": 0.048783173121456316,
"acc_norm": 0.62,
"acc_norm_stderr": 0.048783173121456316
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7056603773584905,
"acc_stderr": 0.02804918631569525,
"acc_norm": 0.7056603773584905,
"acc_norm_stderr": 0.02804918631569525
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6763005780346821,
"acc_stderr": 0.0356760379963917,
"acc_norm": 0.6763005780346821,
"acc_norm_stderr": 0.0356760379963917
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4117647058823529,
"acc_stderr": 0.04897104952726366,
"acc_norm": 0.4117647058823529,
"acc_norm_stderr": 0.04897104952726366
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909282,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909282
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5829787234042553,
"acc_stderr": 0.03223276266711712,
"acc_norm": 0.5829787234042553,
"acc_norm_stderr": 0.03223276266711712
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5379310344827586,
"acc_stderr": 0.04154659671707548,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.04154659671707548
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.42857142857142855,
"acc_stderr": 0.025487187147859375,
"acc_norm": 0.42857142857142855,
"acc_norm_stderr": 0.025487187147859375
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677172,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677172
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7709677419354839,
"acc_stderr": 0.02390491431178265,
"acc_norm": 0.7709677419354839,
"acc_norm_stderr": 0.02390491431178265
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.47783251231527096,
"acc_stderr": 0.03514528562175008,
"acc_norm": 0.47783251231527096,
"acc_norm_stderr": 0.03514528562175008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.031922715695483,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.031922715695483
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.029126522834586815,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.029126522834586815
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.021500249576033456,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.021500249576033456
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.023901157979402534,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.023901157979402534
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.37777777777777777,
"acc_stderr": 0.02956070739246572,
"acc_norm": 0.37777777777777777,
"acc_norm_stderr": 0.02956070739246572
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6974789915966386,
"acc_stderr": 0.02983796238829193,
"acc_norm": 0.6974789915966386,
"acc_norm_stderr": 0.02983796238829193
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.03861557546255169,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.03861557546255169
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5370370370370371,
"acc_stderr": 0.03400603625538272,
"acc_norm": 0.5370370370370371,
"acc_norm_stderr": 0.03400603625538272
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8186274509803921,
"acc_stderr": 0.027044621719474086,
"acc_norm": 0.8186274509803921,
"acc_norm_stderr": 0.027044621719474086
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8016877637130801,
"acc_stderr": 0.02595502084162113,
"acc_norm": 0.8016877637130801,
"acc_norm_stderr": 0.02595502084162113
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8091603053435115,
"acc_stderr": 0.034465133507525995,
"acc_norm": 0.8091603053435115,
"acc_norm_stderr": 0.034465133507525995
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7668711656441718,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.7668711656441718,
"acc_norm_stderr": 0.0332201579577674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.047184714852195886,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.047184714852195886
},
"harness|hendrycksTest-management|5": {
"acc": 0.7572815533980582,
"acc_stderr": 0.04245022486384495,
"acc_norm": 0.7572815533980582,
"acc_norm_stderr": 0.04245022486384495
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8760683760683761,
"acc_stderr": 0.021586494001281376,
"acc_norm": 0.8760683760683761,
"acc_norm_stderr": 0.021586494001281376
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8301404853128991,
"acc_stderr": 0.013428186370608304,
"acc_norm": 0.8301404853128991,
"acc_norm_stderr": 0.013428186370608304
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7341040462427746,
"acc_stderr": 0.02378620325550829,
"acc_norm": 0.7341040462427746,
"acc_norm_stderr": 0.02378620325550829
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4011173184357542,
"acc_stderr": 0.01639222189940708,
"acc_norm": 0.4011173184357542,
"acc_norm_stderr": 0.01639222189940708
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292452,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292452
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.707395498392283,
"acc_stderr": 0.02583989833487798,
"acc_norm": 0.707395498392283,
"acc_norm_stderr": 0.02583989833487798
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7530864197530864,
"acc_stderr": 0.02399350170904211,
"acc_norm": 0.7530864197530864,
"acc_norm_stderr": 0.02399350170904211
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4858156028368794,
"acc_stderr": 0.02981549448368206,
"acc_norm": 0.4858156028368794,
"acc_norm_stderr": 0.02981549448368206
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46870925684485004,
"acc_stderr": 0.01274520462608314,
"acc_norm": 0.46870925684485004,
"acc_norm_stderr": 0.01274520462608314
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6948529411764706,
"acc_stderr": 0.0279715413701706,
"acc_norm": 0.6948529411764706,
"acc_norm_stderr": 0.0279715413701706
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6830065359477124,
"acc_stderr": 0.018824219512706207,
"acc_norm": 0.6830065359477124,
"acc_norm_stderr": 0.018824219512706207
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7306122448979592,
"acc_stderr": 0.02840125202902294,
"acc_norm": 0.7306122448979592,
"acc_norm_stderr": 0.02840125202902294
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.02553843336857833,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.02553843336857833
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.85,
"acc_norm_stderr": 0.0358870281282637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.03869543323472101,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.03869543323472101
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8362573099415205,
"acc_stderr": 0.028380919596145866,
"acc_norm": 0.8362573099415205,
"acc_norm_stderr": 0.028380919596145866
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4602203182374541,
"mc1_stderr": 0.01744801722396088,
"mc2": 0.6205565135867297,
"mc2_stderr": 0.015134580676846265
},
"harness|winogrande|5": {
"acc": 0.813733228097869,
"acc_stderr": 0.01094187795567621
},
"harness|gsm8k|5": {
"acc": 0.7210007581501138,
"acc_stderr": 0.012354115779970308
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
SEACrowd/id_abusive | ---
tags:
- sentiment-analysis
language:
- ind
---
# id_abusive
The ID_ABUSIVE dataset is a collection of 2,016 informal abusive tweets in the Indonesian language,
designed for the sentiment analysis NLP task. This dataset was crawled from Twitter, then filtered
and labelled manually by 20 volunteer annotators. The dataset is labelled with three labels, namely:
not abusive language, abusive but not offensive, and offensive language.
## Dataset Usage
Run `pip install nusacrowd` before loading the dataset through HuggingFace's `load_dataset`.
## Citation
```
@article{IBROHIM2018222,
title = {A Dataset and Preliminaries Study for Abusive Language Detection in Indonesian Social Media},
journal = {Procedia Computer Science},
volume = {135},
pages = {222-229},
year = {2018},
note = {The 3rd International Conference on Computer Science and Computational Intelligence (ICCSCI 2018) : Empowering Smart Technology in Digital Era for a Better Life},
issn = {1877-0509},
doi = {https://doi.org/10.1016/j.procs.2018.08.169},
url = {https://www.sciencedirect.com/science/article/pii/S1877050918314583},
author = {Muhammad Okky Ibrohim and Indra Budi},
keywords = {abusive language, twitter, machine learning},
abstract = {Abusive language is an expression (both oral or text) that contains abusive/dirty words or phrases both in the context of jokes, a vulgar sex conservation or to cursing someone. Nowadays many people on the internet (netizens) write and post an abusive language in the social media such as Facebook, Line, Twitter, etc. Detecting an abusive language in social media is a difficult problem to resolve because this problem can not be resolved just use word matching. This paper discusses a preliminaries study for abusive language detection in Indonesian social media and the challenge in developing a system for Indonesian abusive language detection, especially in social media. We also built reported an experiment for abusive language detection on Indonesian tweet using machine learning approach with a simple word n-gram and char n-gram features. We use Naive Bayes, Support Vector Machine, and Random Forest Decision Tree classifier to identify the tweet whether the tweet is a not abusive language, abusive but not offensive, or offensive language. The experiment results show that the Naive Bayes classifier with the combination of word unigram + bigrams features gives the best result i.e. 70.06% of F1 - Score. However, if we classifying the tweet into two labels only (not abusive language and abusive language), all classifier that we used gives a higher result (more than 83% of F1 - Score for every classifier). The dataset in this experiment is available for other researchers that interest to improved this study.}
}
```
## License
Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International
## Homepage
[https://www.sciencedirect.com/science/article/pii/S1877050918314583](https://www.sciencedirect.com/science/article/pii/S1877050918314583)
### NusaCatalogue
For easy indexing and metadata: [https://indonlp.github.io/nusa-catalogue](https://indonlp.github.io/nusa-catalogue) |
open-llm-leaderboard/details_FPHam__Sydney_Overthinker_13b_HF | ---
pretty_name: Evaluation run of FPHam/Sydney_Overthinker_13b_HF
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [FPHam/Sydney_Overthinker_13b_HF](https://huggingface.co/FPHam/Sydney_Overthinker_13b_HF)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_FPHam__Sydney_Overthinker_13b_HF\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-12-08T02:51:52.068469](https://huggingface.co/datasets/open-llm-leaderboard/details_FPHam__Sydney_Overthinker_13b_HF/blob/main/results_2023-12-08T02-51-52.068469.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5136287950246363,\n\
\ \"acc_stderr\": 0.034087739996992605,\n \"acc_norm\": 0.5191163704808761,\n\
\ \"acc_norm_stderr\": 0.03483348555557295,\n \"mc1\": 0.2998776009791922,\n\
\ \"mc1_stderr\": 0.01604035296671362,\n \"mc2\": 0.45697851910783077,\n\
\ \"mc2_stderr\": 0.015427158150833389\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5614334470989761,\n \"acc_stderr\": 0.014500682618212865,\n\
\ \"acc_norm\": 0.5895904436860068,\n \"acc_norm_stderr\": 0.014374922192642664\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6118303126867158,\n\
\ \"acc_stderr\": 0.004863375698153865,\n \"acc_norm\": 0.8085042820155347,\n\
\ \"acc_norm_stderr\": 0.0039267405951797715\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n\
\ \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n\
\ \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.47368421052631576,\n \"acc_stderr\": 0.04063302731486671,\n\
\ \"acc_norm\": 0.47368421052631576,\n \"acc_norm_stderr\": 0.04063302731486671\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.5132075471698113,\n \"acc_stderr\": 0.030762134874500476,\n\
\ \"acc_norm\": 0.5132075471698113,\n \"acc_norm_stderr\": 0.030762134874500476\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n\
\ \"acc_stderr\": 0.04101405519842425,\n \"acc_norm\": 0.5972222222222222,\n\
\ \"acc_norm_stderr\": 0.04101405519842425\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \
\ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n\
\ \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4797687861271676,\n\
\ \"acc_stderr\": 0.03809342081273957,\n \"acc_norm\": 0.4797687861271676,\n\
\ \"acc_norm_stderr\": 0.03809342081273957\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.0433643270799318,\n\
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.0433643270799318\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n\
\ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.4553191489361702,\n \"acc_stderr\": 0.03255525359340355,\n\
\ \"acc_norm\": 0.4553191489361702,\n \"acc_norm_stderr\": 0.03255525359340355\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\
\ \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n\
\ \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.041546596717075474,\n\
\ \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.041546596717075474\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2962962962962963,\n \"acc_stderr\": 0.023517294335963286,\n \"\
acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.023517294335963286\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n\
\ \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n\
\ \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5870967741935483,\n\
\ \"acc_stderr\": 0.028009138125400387,\n \"acc_norm\": 0.5870967741935483,\n\
\ \"acc_norm_stderr\": 0.028009138125400387\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.3399014778325123,\n \"acc_stderr\": 0.033327690684107895,\n\
\ \"acc_norm\": 0.3399014778325123,\n \"acc_norm_stderr\": 0.033327690684107895\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\
: 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.03793713171165634,\n\
\ \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.03793713171165634\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.6616161616161617,\n \"acc_stderr\": 0.033711241426263014,\n \"\
acc_norm\": 0.6616161616161617,\n \"acc_norm_stderr\": 0.033711241426263014\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.7409326424870466,\n \"acc_stderr\": 0.0316187791793541,\n\
\ \"acc_norm\": 0.7409326424870466,\n \"acc_norm_stderr\": 0.0316187791793541\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.5230769230769231,\n \"acc_stderr\": 0.025323990861736232,\n\
\ \"acc_norm\": 0.5230769230769231,\n \"acc_norm_stderr\": 0.025323990861736232\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \
\ \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.46218487394957986,\n \"acc_stderr\": 0.0323854694875898,\n \
\ \"acc_norm\": 0.46218487394957986,\n \"acc_norm_stderr\": 0.0323854694875898\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.25165562913907286,\n \"acc_stderr\": 0.035433042343899844,\n \"\
acc_norm\": 0.25165562913907286,\n \"acc_norm_stderr\": 0.035433042343899844\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.6862385321100918,\n \"acc_stderr\": 0.019894723341469116,\n \"\
acc_norm\": 0.6862385321100918,\n \"acc_norm_stderr\": 0.019894723341469116\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.33796296296296297,\n \"acc_stderr\": 0.03225941352631295,\n \"\
acc_norm\": 0.33796296296296297,\n \"acc_norm_stderr\": 0.03225941352631295\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.6323529411764706,\n \"acc_stderr\": 0.03384132045674118,\n \"\
acc_norm\": 0.6323529411764706,\n \"acc_norm_stderr\": 0.03384132045674118\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.6708860759493671,\n \"acc_stderr\": 0.030587326294702368,\n \
\ \"acc_norm\": 0.6708860759493671,\n \"acc_norm_stderr\": 0.030587326294702368\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6412556053811659,\n\
\ \"acc_stderr\": 0.03219079200419995,\n \"acc_norm\": 0.6412556053811659,\n\
\ \"acc_norm_stderr\": 0.03219079200419995\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.5954198473282443,\n \"acc_stderr\": 0.043046937953806645,\n\
\ \"acc_norm\": 0.5954198473282443,\n \"acc_norm_stderr\": 0.043046937953806645\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7107438016528925,\n \"acc_stderr\": 0.04139112727635463,\n \"\
acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6666666666666666,\n\
\ \"acc_stderr\": 0.04557239513497751,\n \"acc_norm\": 0.6666666666666666,\n\
\ \"acc_norm_stderr\": 0.04557239513497751\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.5705521472392638,\n \"acc_stderr\": 0.03889066619112722,\n\
\ \"acc_norm\": 0.5705521472392638,\n \"acc_norm_stderr\": 0.03889066619112722\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n\
\ \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n\
\ \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n\
\ \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7948717948717948,\n\
\ \"acc_stderr\": 0.026453508054040335,\n \"acc_norm\": 0.7948717948717948,\n\
\ \"acc_norm_stderr\": 0.026453508054040335\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.56,\n \"acc_stderr\": 0.0498887651569859,\n \
\ \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n\
\ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7203065134099617,\n\
\ \"acc_stderr\": 0.016050792148036522,\n \"acc_norm\": 0.7203065134099617,\n\
\ \"acc_norm_stderr\": 0.016050792148036522\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.5809248554913294,\n \"acc_stderr\": 0.026564178111422622,\n\
\ \"acc_norm\": 0.5809248554913294,\n \"acc_norm_stderr\": 0.026564178111422622\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32737430167597764,\n\
\ \"acc_stderr\": 0.015694238967737383,\n \"acc_norm\": 0.32737430167597764,\n\
\ \"acc_norm_stderr\": 0.015694238967737383\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.5490196078431373,\n \"acc_stderr\": 0.028491993586171566,\n\
\ \"acc_norm\": 0.5490196078431373,\n \"acc_norm_stderr\": 0.028491993586171566\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6045016077170418,\n\
\ \"acc_stderr\": 0.02777091853142784,\n \"acc_norm\": 0.6045016077170418,\n\
\ \"acc_norm_stderr\": 0.02777091853142784\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6080246913580247,\n \"acc_stderr\": 0.027163686038271146,\n\
\ \"acc_norm\": 0.6080246913580247,\n \"acc_norm_stderr\": 0.027163686038271146\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.3723404255319149,\n \"acc_stderr\": 0.02883892147125146,\n \
\ \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.02883892147125146\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3963494132985658,\n\
\ \"acc_stderr\": 0.012492830452095217,\n \"acc_norm\": 0.3963494132985658,\n\
\ \"acc_norm_stderr\": 0.012492830452095217\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.03016191193076711,\n\
\ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.03016191193076711\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.5163398692810458,\n \"acc_stderr\": 0.02021703065318646,\n \
\ \"acc_norm\": 0.5163398692810458,\n \"acc_norm_stderr\": 0.02021703065318646\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6090909090909091,\n\
\ \"acc_stderr\": 0.04673752333670239,\n \"acc_norm\": 0.6090909090909091,\n\
\ \"acc_norm_stderr\": 0.04673752333670239\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.6081632653061224,\n \"acc_stderr\": 0.031251275910891656,\n\
\ \"acc_norm\": 0.6081632653061224,\n \"acc_norm_stderr\": 0.031251275910891656\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6467661691542289,\n\
\ \"acc_stderr\": 0.03379790611796777,\n \"acc_norm\": 0.6467661691542289,\n\
\ \"acc_norm_stderr\": 0.03379790611796777\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \
\ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42168674698795183,\n\
\ \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.42168674698795183,\n\
\ \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7134502923976608,\n \"acc_stderr\": 0.03467826685703826,\n\
\ \"acc_norm\": 0.7134502923976608,\n \"acc_norm_stderr\": 0.03467826685703826\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2998776009791922,\n\
\ \"mc1_stderr\": 0.01604035296671362,\n \"mc2\": 0.45697851910783077,\n\
\ \"mc2_stderr\": 0.015427158150833389\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.739542225730071,\n \"acc_stderr\": 0.012334833671998297\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18877937831690675,\n \
\ \"acc_stderr\": 0.010779262837202751\n }\n}\n```"
repo_url: https://huggingface.co/FPHam/Sydney_Overthinker_13b_HF
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|arc:challenge|25_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|gsm8k|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hellaswag|10_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-08T02-51-52.068469.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-08T02-51-52.068469.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- '**/details_harness|winogrande|5_2023-12-08T02-51-52.068469.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-12-08T02-51-52.068469.parquet'
- config_name: results
data_files:
- split: 2023_12_08T02_51_52.068469
path:
- results_2023-12-08T02-51-52.068469.parquet
- split: latest
path:
- results_2023-12-08T02-51-52.068469.parquet
---
# Dataset Card for Evaluation run of FPHam/Sydney_Overthinker_13b_HF
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/FPHam/Sydney_Overthinker_13b_HF
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [FPHam/Sydney_Overthinker_13b_HF](https://huggingface.co/FPHam/Sydney_Overthinker_13b_HF) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_FPHam__Sydney_Overthinker_13b_HF",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-12-08T02:51:52.068469](https://huggingface.co/datasets/open-llm-leaderboard/details_FPHam__Sydney_Overthinker_13b_HF/blob/main/results_2023-12-08T02-51-52.068469.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5136287950246363,
"acc_stderr": 0.034087739996992605,
"acc_norm": 0.5191163704808761,
"acc_norm_stderr": 0.03483348555557295,
"mc1": 0.2998776009791922,
"mc1_stderr": 0.01604035296671362,
"mc2": 0.45697851910783077,
"mc2_stderr": 0.015427158150833389
},
"harness|arc:challenge|25": {
"acc": 0.5614334470989761,
"acc_stderr": 0.014500682618212865,
"acc_norm": 0.5895904436860068,
"acc_norm_stderr": 0.014374922192642664
},
"harness|hellaswag|10": {
"acc": 0.6118303126867158,
"acc_stderr": 0.004863375698153865,
"acc_norm": 0.8085042820155347,
"acc_norm_stderr": 0.0039267405951797715
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4888888888888889,
"acc_stderr": 0.04318275491977976,
"acc_norm": 0.4888888888888889,
"acc_norm_stderr": 0.04318275491977976
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.47368421052631576,
"acc_stderr": 0.04063302731486671,
"acc_norm": 0.47368421052631576,
"acc_norm_stderr": 0.04063302731486671
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.5132075471698113,
"acc_stderr": 0.030762134874500476,
"acc_norm": 0.5132075471698113,
"acc_norm_stderr": 0.030762134874500476
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.5972222222222222,
"acc_stderr": 0.04101405519842425,
"acc_norm": 0.5972222222222222,
"acc_norm_stderr": 0.04101405519842425
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.39,
"acc_stderr": 0.04902071300001974,
"acc_norm": 0.39,
"acc_norm_stderr": 0.04902071300001974
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621505,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621505
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.4797687861271676,
"acc_stderr": 0.03809342081273957,
"acc_norm": 0.4797687861271676,
"acc_norm_stderr": 0.03809342081273957
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.0433643270799318,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.0433643270799318
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.61,
"acc_stderr": 0.04902071300001975,
"acc_norm": 0.61,
"acc_norm_stderr": 0.04902071300001975
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4553191489361702,
"acc_stderr": 0.03255525359340355,
"acc_norm": 0.4553191489361702,
"acc_norm_stderr": 0.03255525359340355
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.041424397194893624,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.041424397194893624
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.46206896551724136,
"acc_stderr": 0.041546596717075474,
"acc_norm": 0.46206896551724136,
"acc_norm_stderr": 0.041546596717075474
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.023517294335963286,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.023517294335963286
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.35714285714285715,
"acc_stderr": 0.042857142857142816,
"acc_norm": 0.35714285714285715,
"acc_norm_stderr": 0.042857142857142816
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.5870967741935483,
"acc_stderr": 0.028009138125400387,
"acc_norm": 0.5870967741935483,
"acc_norm_stderr": 0.028009138125400387
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.3399014778325123,
"acc_stderr": 0.033327690684107895,
"acc_norm": 0.3399014778325123,
"acc_norm_stderr": 0.033327690684107895
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.6181818181818182,
"acc_stderr": 0.03793713171165634,
"acc_norm": 0.6181818181818182,
"acc_norm_stderr": 0.03793713171165634
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6616161616161617,
"acc_stderr": 0.033711241426263014,
"acc_norm": 0.6616161616161617,
"acc_norm_stderr": 0.033711241426263014
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.7409326424870466,
"acc_stderr": 0.0316187791793541,
"acc_norm": 0.7409326424870466,
"acc_norm_stderr": 0.0316187791793541
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5230769230769231,
"acc_stderr": 0.025323990861736232,
"acc_norm": 0.5230769230769231,
"acc_norm_stderr": 0.025323990861736232
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.28888888888888886,
"acc_stderr": 0.027634907264178544,
"acc_norm": 0.28888888888888886,
"acc_norm_stderr": 0.027634907264178544
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.46218487394957986,
"acc_stderr": 0.0323854694875898,
"acc_norm": 0.46218487394957986,
"acc_norm_stderr": 0.0323854694875898
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.25165562913907286,
"acc_stderr": 0.035433042343899844,
"acc_norm": 0.25165562913907286,
"acc_norm_stderr": 0.035433042343899844
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.6862385321100918,
"acc_stderr": 0.019894723341469116,
"acc_norm": 0.6862385321100918,
"acc_norm_stderr": 0.019894723341469116
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.33796296296296297,
"acc_stderr": 0.03225941352631295,
"acc_norm": 0.33796296296296297,
"acc_norm_stderr": 0.03225941352631295
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.6323529411764706,
"acc_stderr": 0.03384132045674118,
"acc_norm": 0.6323529411764706,
"acc_norm_stderr": 0.03384132045674118
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.6708860759493671,
"acc_stderr": 0.030587326294702368,
"acc_norm": 0.6708860759493671,
"acc_norm_stderr": 0.030587326294702368
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6412556053811659,
"acc_stderr": 0.03219079200419995,
"acc_norm": 0.6412556053811659,
"acc_norm_stderr": 0.03219079200419995
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5954198473282443,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.5954198473282443,
"acc_norm_stderr": 0.043046937953806645
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7107438016528925,
"acc_stderr": 0.04139112727635463,
"acc_norm": 0.7107438016528925,
"acc_norm_stderr": 0.04139112727635463
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.04557239513497751
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5705521472392638,
"acc_stderr": 0.03889066619112722,
"acc_norm": 0.5705521472392638,
"acc_norm_stderr": 0.03889066619112722
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.3125,
"acc_stderr": 0.043994650575715215,
"acc_norm": 0.3125,
"acc_norm_stderr": 0.043994650575715215
},
"harness|hendrycksTest-management|5": {
"acc": 0.6796116504854369,
"acc_stderr": 0.04620284082280041,
"acc_norm": 0.6796116504854369,
"acc_norm_stderr": 0.04620284082280041
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.7948717948717948,
"acc_stderr": 0.026453508054040335,
"acc_norm": 0.7948717948717948,
"acc_norm_stderr": 0.026453508054040335
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.56,
"acc_stderr": 0.0498887651569859,
"acc_norm": 0.56,
"acc_norm_stderr": 0.0498887651569859
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7203065134099617,
"acc_stderr": 0.016050792148036522,
"acc_norm": 0.7203065134099617,
"acc_norm_stderr": 0.016050792148036522
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5809248554913294,
"acc_stderr": 0.026564178111422622,
"acc_norm": 0.5809248554913294,
"acc_norm_stderr": 0.026564178111422622
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.32737430167597764,
"acc_stderr": 0.015694238967737383,
"acc_norm": 0.32737430167597764,
"acc_norm_stderr": 0.015694238967737383
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5490196078431373,
"acc_stderr": 0.028491993586171566,
"acc_norm": 0.5490196078431373,
"acc_norm_stderr": 0.028491993586171566
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6045016077170418,
"acc_stderr": 0.02777091853142784,
"acc_norm": 0.6045016077170418,
"acc_norm_stderr": 0.02777091853142784
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6080246913580247,
"acc_stderr": 0.027163686038271146,
"acc_norm": 0.6080246913580247,
"acc_norm_stderr": 0.027163686038271146
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.3723404255319149,
"acc_stderr": 0.02883892147125146,
"acc_norm": 0.3723404255319149,
"acc_norm_stderr": 0.02883892147125146
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.3963494132985658,
"acc_stderr": 0.012492830452095217,
"acc_norm": 0.3963494132985658,
"acc_norm_stderr": 0.012492830452095217
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.03016191193076711,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.03016191193076711
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.5163398692810458,
"acc_stderr": 0.02021703065318646,
"acc_norm": 0.5163398692810458,
"acc_norm_stderr": 0.02021703065318646
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6090909090909091,
"acc_stderr": 0.04673752333670239,
"acc_norm": 0.6090909090909091,
"acc_norm_stderr": 0.04673752333670239
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.6081632653061224,
"acc_stderr": 0.031251275910891656,
"acc_norm": 0.6081632653061224,
"acc_norm_stderr": 0.031251275910891656
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.6467661691542289,
"acc_stderr": 0.03379790611796777,
"acc_norm": 0.6467661691542289,
"acc_norm_stderr": 0.03379790611796777
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7134502923976608,
"acc_stderr": 0.03467826685703826,
"acc_norm": 0.7134502923976608,
"acc_norm_stderr": 0.03467826685703826
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2998776009791922,
"mc1_stderr": 0.01604035296671362,
"mc2": 0.45697851910783077,
"mc2_stderr": 0.015427158150833389
},
"harness|winogrande|5": {
"acc": 0.739542225730071,
"acc_stderr": 0.012334833671998297
},
"harness|gsm8k|5": {
"acc": 0.18877937831690675,
"acc_stderr": 0.010779262837202751
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
abwqr/michael_scott | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 79558.0
num_examples: 12
download_size: 0
dataset_size: 79558.0
---
# Dataset Card for "michael_scott"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
shahidul034/text_summarization_dataset6 | ---
dataset_info:
features:
- name: title
dtype: string
- name: content
dtype: string
splits:
- name: train
num_bytes: 129713026
num_examples: 113562
download_size: 44838616
dataset_size: 129713026
---
# Dataset Card for "text_summarization_dataset6"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
parksimon0808/prm800k-llama-generator | ---
dataset_info:
features:
- name: texts
dtype: string
- name: input_ids
sequence: int32
- name: labels
sequence: int64
- name: answers
dtype: string
splits:
- name: train
num_bytes: 107264878
num_examples: 16465
- name: test
num_bytes: 4635493
num_examples: 773
download_size: 23666282
dataset_size: 111900371
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
# Dataset Card for "prm800k-llama-v3"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
MMShmBlogs/hmblogs-v3 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 45957987986
num_examples: 16896817
download_size: 21302352262
dataset_size: 45957987986
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/details_pmking27__PrathameshLLM-2B | ---
pretty_name: Evaluation run of pmking27/PrathameshLLM-2B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [pmking27/PrathameshLLM-2B](https://huggingface.co/pmking27/PrathameshLLM-2B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run. The \"train\" split always points to the latest results.\n\
\nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pmking27__PrathameshLLM-2B\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-04-07T15:12:15.779927](https://huggingface.co/datasets/open-llm-leaderboard/details_pmking27__PrathameshLLM-2B/blob/main/results_2024-04-07T15-12-15.779927.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.38448642166678587,\n\
\ \"acc_stderr\": 0.03412711045658374,\n \"acc_norm\": 0.3882749361001093,\n\
\ \"acc_norm_stderr\": 0.03491709236659397,\n \"mc1\": 0.2913096695226438,\n\
\ \"mc1_stderr\": 0.01590598704818483,\n \"mc2\": 0.44689893094855493,\n\
\ \"mc2_stderr\": 0.014502506509597363\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.4257679180887372,\n \"acc_stderr\": 0.014449464278868809,\n\
\ \"acc_norm\": 0.447098976109215,\n \"acc_norm_stderr\": 0.014529380160526845\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5071698864767975,\n\
\ \"acc_stderr\": 0.00498926836296872,\n \"acc_norm\": 0.6840270862378013,\n\
\ \"acc_norm_stderr\": 0.004639520453444027\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4074074074074074,\n\
\ \"acc_stderr\": 0.04244633238353228,\n \"acc_norm\": 0.4074074074074074,\n\
\ \"acc_norm_stderr\": 0.04244633238353228\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.375,\n \"acc_stderr\": 0.039397364351956274,\n \
\ \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.039397364351956274\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.32,\n\
\ \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \
\ \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.4226415094339623,\n \"acc_stderr\": 0.03040233144576954,\n\
\ \"acc_norm\": 0.4226415094339623,\n \"acc_norm_stderr\": 0.03040233144576954\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4097222222222222,\n\
\ \"acc_stderr\": 0.04112490974670787,\n \"acc_norm\": 0.4097222222222222,\n\
\ \"acc_norm_stderr\": 0.04112490974670787\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n\
\ \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.32947976878612717,\n\
\ \"acc_stderr\": 0.03583901754736411,\n \"acc_norm\": 0.32947976878612717,\n\
\ \"acc_norm_stderr\": 0.03583901754736411\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.039505818611799616,\n\
\ \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.039505818611799616\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n\
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.37446808510638296,\n \"acc_stderr\": 0.03163910665367291,\n\
\ \"acc_norm\": 0.37446808510638296,\n \"acc_norm_stderr\": 0.03163910665367291\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\
\ \"acc_stderr\": 0.04266339443159394,\n \"acc_norm\": 0.2894736842105263,\n\
\ \"acc_norm_stderr\": 0.04266339443159394\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.45517241379310347,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.45517241379310347,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708617,\n \"\
acc_norm\": 0.26455026455026454,\n \"acc_norm_stderr\": 0.022717467897708617\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n\
\ \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n\
\ \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4290322580645161,\n\
\ \"acc_stderr\": 0.02815603653823321,\n \"acc_norm\": 0.4290322580645161,\n\
\ \"acc_norm_stderr\": 0.02815603653823321\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.33497536945812806,\n \"acc_stderr\": 0.033208527423483104,\n\
\ \"acc_norm\": 0.33497536945812806,\n \"acc_norm_stderr\": 0.033208527423483104\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\"\
: 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.3696969696969697,\n \"acc_stderr\": 0.03769430314512568,\n\
\ \"acc_norm\": 0.3696969696969697,\n \"acc_norm_stderr\": 0.03769430314512568\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.46464646464646464,\n \"acc_stderr\": 0.03553436368828063,\n \"\
acc_norm\": 0.46464646464646464,\n \"acc_norm_stderr\": 0.03553436368828063\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.5077720207253886,\n \"acc_stderr\": 0.03608003225569654,\n\
\ \"acc_norm\": 0.5077720207253886,\n \"acc_norm_stderr\": 0.03608003225569654\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.38461538461538464,\n \"acc_stderr\": 0.024666744915187222,\n\
\ \"acc_norm\": 0.38461538461538464,\n \"acc_norm_stderr\": 0.024666744915187222\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2518518518518518,\n \"acc_stderr\": 0.02646611753895992,\n \
\ \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.02646611753895992\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.3949579831932773,\n \"acc_stderr\": 0.03175367846096626,\n \
\ \"acc_norm\": 0.3949579831932773,\n \"acc_norm_stderr\": 0.03175367846096626\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.2913907284768212,\n \"acc_stderr\": 0.03710185726119995,\n \"\
acc_norm\": 0.2913907284768212,\n \"acc_norm_stderr\": 0.03710185726119995\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.43853211009174314,\n \"acc_stderr\": 0.021274713073954565,\n \"\
acc_norm\": 0.43853211009174314,\n \"acc_norm_stderr\": 0.021274713073954565\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.2962962962962963,\n \"acc_stderr\": 0.031141447823536037,\n \"\
acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.031141447823536037\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.37254901960784315,\n \"acc_stderr\": 0.03393388584958404,\n \"\
acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.03393388584958404\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.4050632911392405,\n \"acc_stderr\": 0.03195514741370673,\n \
\ \"acc_norm\": 0.4050632911392405,\n \"acc_norm_stderr\": 0.03195514741370673\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3991031390134529,\n\
\ \"acc_stderr\": 0.03286745312567961,\n \"acc_norm\": 0.3991031390134529,\n\
\ \"acc_norm_stderr\": 0.03286745312567961\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.42748091603053434,\n \"acc_stderr\": 0.04338920305792401,\n\
\ \"acc_norm\": 0.42748091603053434,\n \"acc_norm_stderr\": 0.04338920305792401\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.5454545454545454,\n \"acc_stderr\": 0.04545454545454546,\n \"\
acc_norm\": 0.5454545454545454,\n \"acc_norm_stderr\": 0.04545454545454546\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.37037037037037035,\n\
\ \"acc_stderr\": 0.04668408033024932,\n \"acc_norm\": 0.37037037037037035,\n\
\ \"acc_norm_stderr\": 0.04668408033024932\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.3619631901840491,\n \"acc_stderr\": 0.037757007291414416,\n\
\ \"acc_norm\": 0.3619631901840491,\n \"acc_norm_stderr\": 0.037757007291414416\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\
\ \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n\
\ \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.4854368932038835,\n \"acc_stderr\": 0.049486373240266376,\n\
\ \"acc_norm\": 0.4854368932038835,\n \"acc_norm_stderr\": 0.049486373240266376\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5726495726495726,\n\
\ \"acc_stderr\": 0.032408473935163266,\n \"acc_norm\": 0.5726495726495726,\n\
\ \"acc_norm_stderr\": 0.032408473935163266\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237101,\n \
\ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237101\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5159642401021711,\n\
\ \"acc_stderr\": 0.01787084750608173,\n \"acc_norm\": 0.5159642401021711,\n\
\ \"acc_norm_stderr\": 0.01787084750608173\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.3786127167630058,\n \"acc_stderr\": 0.02611374936131034,\n\
\ \"acc_norm\": 0.3786127167630058,\n \"acc_norm_stderr\": 0.02611374936131034\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25139664804469275,\n\
\ \"acc_stderr\": 0.014508979453553988,\n \"acc_norm\": 0.25139664804469275,\n\
\ \"acc_norm_stderr\": 0.014508979453553988\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.48366013071895425,\n \"acc_stderr\": 0.028614624752805407,\n\
\ \"acc_norm\": 0.48366013071895425,\n \"acc_norm_stderr\": 0.028614624752805407\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.41479099678456594,\n\
\ \"acc_stderr\": 0.027982680459759553,\n \"acc_norm\": 0.41479099678456594,\n\
\ \"acc_norm_stderr\": 0.027982680459759553\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.4382716049382716,\n \"acc_stderr\": 0.027607914087400483,\n\
\ \"acc_norm\": 0.4382716049382716,\n \"acc_norm_stderr\": 0.027607914087400483\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.28368794326241137,\n \"acc_stderr\": 0.026891709428343968,\n \
\ \"acc_norm\": 0.28368794326241137,\n \"acc_norm_stderr\": 0.026891709428343968\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3116036505867014,\n\
\ \"acc_stderr\": 0.01182903918284965,\n \"acc_norm\": 0.3116036505867014,\n\
\ \"acc_norm_stderr\": 0.01182903918284965\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.29044117647058826,\n \"acc_stderr\": 0.027576468622740522,\n\
\ \"acc_norm\": 0.29044117647058826,\n \"acc_norm_stderr\": 0.027576468622740522\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.3545751633986928,\n \"acc_stderr\": 0.0193533605475537,\n \
\ \"acc_norm\": 0.3545751633986928,\n \"acc_norm_stderr\": 0.0193533605475537\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4090909090909091,\n\
\ \"acc_stderr\": 0.047093069786618966,\n \"acc_norm\": 0.4090909090909091,\n\
\ \"acc_norm_stderr\": 0.047093069786618966\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.4122448979591837,\n \"acc_stderr\": 0.03151236044674281,\n\
\ \"acc_norm\": 0.4122448979591837,\n \"acc_norm_stderr\": 0.03151236044674281\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.472636815920398,\n\
\ \"acc_stderr\": 0.035302355173346824,\n \"acc_norm\": 0.472636815920398,\n\
\ \"acc_norm_stderr\": 0.035302355173346824\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \
\ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3855421686746988,\n\
\ \"acc_stderr\": 0.037891344246115496,\n \"acc_norm\": 0.3855421686746988,\n\
\ \"acc_norm_stderr\": 0.037891344246115496\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.03834234744164993,\n\
\ \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.03834234744164993\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2913096695226438,\n\
\ \"mc1_stderr\": 0.01590598704818483,\n \"mc2\": 0.44689893094855493,\n\
\ \"mc2_stderr\": 0.014502506509597363\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6511444356748224,\n \"acc_stderr\": 0.01339505932013732\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.09249431387414708,\n \
\ \"acc_stderr\": 0.007980396874560178\n }\n}\n```"
repo_url: https://huggingface.co/pmking27/PrathameshLLM-2B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|arc:challenge|25_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|gsm8k|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hellaswag|10_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-07T15-12-15.779927.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-07T15-12-15.779927.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- '**/details_harness|winogrande|5_2024-04-07T15-12-15.779927.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-04-07T15-12-15.779927.parquet'
- config_name: results
data_files:
- split: 2024_04_07T15_12_15.779927
path:
- results_2024-04-07T15-12-15.779927.parquet
- split: latest
path:
- results_2024-04-07T15-12-15.779927.parquet
---
# Dataset Card for Evaluation run of pmking27/PrathameshLLM-2B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [pmking27/PrathameshLLM-2B](https://huggingface.co/pmking27/PrathameshLLM-2B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_pmking27__PrathameshLLM-2B",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-04-07T15:12:15.779927](https://huggingface.co/datasets/open-llm-leaderboard/details_pmking27__PrathameshLLM-2B/blob/main/results_2024-04-07T15-12-15.779927.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each task's results in the "results" config and in the "latest" split of each eval):
```python
{
"all": {
"acc": 0.38448642166678587,
"acc_stderr": 0.03412711045658374,
"acc_norm": 0.3882749361001093,
"acc_norm_stderr": 0.03491709236659397,
"mc1": 0.2913096695226438,
"mc1_stderr": 0.01590598704818483,
"mc2": 0.44689893094855493,
"mc2_stderr": 0.014502506509597363
},
"harness|arc:challenge|25": {
"acc": 0.4257679180887372,
"acc_stderr": 0.014449464278868809,
"acc_norm": 0.447098976109215,
"acc_norm_stderr": 0.014529380160526845
},
"harness|hellaswag|10": {
"acc": 0.5071698864767975,
"acc_stderr": 0.00498926836296872,
"acc_norm": 0.6840270862378013,
"acc_norm_stderr": 0.004639520453444027
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768081,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768081
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4074074074074074,
"acc_stderr": 0.04244633238353228,
"acc_norm": 0.4074074074074074,
"acc_norm_stderr": 0.04244633238353228
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.375,
"acc_stderr": 0.039397364351956274,
"acc_norm": 0.375,
"acc_norm_stderr": 0.039397364351956274
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.4226415094339623,
"acc_stderr": 0.03040233144576954,
"acc_norm": 0.4226415094339623,
"acc_norm_stderr": 0.03040233144576954
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.4097222222222222,
"acc_stderr": 0.04112490974670787,
"acc_norm": 0.4097222222222222,
"acc_norm_stderr": 0.04112490974670787
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768078,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768078
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.32947976878612717,
"acc_stderr": 0.03583901754736411,
"acc_norm": 0.32947976878612717,
"acc_norm_stderr": 0.03583901754736411
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.19607843137254902,
"acc_stderr": 0.039505818611799616,
"acc_norm": 0.19607843137254902,
"acc_norm_stderr": 0.039505818611799616
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.37446808510638296,
"acc_stderr": 0.03163910665367291,
"acc_norm": 0.37446808510638296,
"acc_norm_stderr": 0.03163910665367291
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159394,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159394
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.45517241379310347,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.45517241379310347,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.26455026455026454,
"acc_stderr": 0.022717467897708617,
"acc_norm": 0.26455026455026454,
"acc_norm_stderr": 0.022717467897708617
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.30158730158730157,
"acc_stderr": 0.04104947269903394,
"acc_norm": 0.30158730158730157,
"acc_norm_stderr": 0.04104947269903394
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.4290322580645161,
"acc_stderr": 0.02815603653823321,
"acc_norm": 0.4290322580645161,
"acc_norm_stderr": 0.02815603653823321
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.33497536945812806,
"acc_stderr": 0.033208527423483104,
"acc_norm": 0.33497536945812806,
"acc_norm_stderr": 0.033208527423483104
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.3696969696969697,
"acc_stderr": 0.03769430314512568,
"acc_norm": 0.3696969696969697,
"acc_norm_stderr": 0.03769430314512568
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.46464646464646464,
"acc_stderr": 0.03553436368828063,
"acc_norm": 0.46464646464646464,
"acc_norm_stderr": 0.03553436368828063
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.5077720207253886,
"acc_stderr": 0.03608003225569654,
"acc_norm": 0.5077720207253886,
"acc_norm_stderr": 0.03608003225569654
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.38461538461538464,
"acc_stderr": 0.024666744915187222,
"acc_norm": 0.38461538461538464,
"acc_norm_stderr": 0.024666744915187222
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2518518518518518,
"acc_stderr": 0.02646611753895992,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.02646611753895992
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.3949579831932773,
"acc_stderr": 0.03175367846096626,
"acc_norm": 0.3949579831932773,
"acc_norm_stderr": 0.03175367846096626
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.2913907284768212,
"acc_stderr": 0.03710185726119995,
"acc_norm": 0.2913907284768212,
"acc_norm_stderr": 0.03710185726119995
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.43853211009174314,
"acc_stderr": 0.021274713073954565,
"acc_norm": 0.43853211009174314,
"acc_norm_stderr": 0.021274713073954565
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.031141447823536037,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.031141447823536037
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.03393388584958404,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.03393388584958404
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.4050632911392405,
"acc_stderr": 0.03195514741370673,
"acc_norm": 0.4050632911392405,
"acc_norm_stderr": 0.03195514741370673
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.3991031390134529,
"acc_stderr": 0.03286745312567961,
"acc_norm": 0.3991031390134529,
"acc_norm_stderr": 0.03286745312567961
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.42748091603053434,
"acc_stderr": 0.04338920305792401,
"acc_norm": 0.42748091603053434,
"acc_norm_stderr": 0.04338920305792401
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.5454545454545454,
"acc_stderr": 0.04545454545454546,
"acc_norm": 0.5454545454545454,
"acc_norm_stderr": 0.04545454545454546
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.37037037037037035,
"acc_stderr": 0.04668408033024932,
"acc_norm": 0.37037037037037035,
"acc_norm_stderr": 0.04668408033024932
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.3619631901840491,
"acc_stderr": 0.037757007291414416,
"acc_norm": 0.3619631901840491,
"acc_norm_stderr": 0.037757007291414416
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.38392857142857145,
"acc_stderr": 0.04616143075028547,
"acc_norm": 0.38392857142857145,
"acc_norm_stderr": 0.04616143075028547
},
"harness|hendrycksTest-management|5": {
"acc": 0.4854368932038835,
"acc_stderr": 0.049486373240266376,
"acc_norm": 0.4854368932038835,
"acc_norm_stderr": 0.049486373240266376
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.5726495726495726,
"acc_stderr": 0.032408473935163266,
"acc_norm": 0.5726495726495726,
"acc_norm_stderr": 0.032408473935163266
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.41,
"acc_stderr": 0.04943110704237101,
"acc_norm": 0.41,
"acc_norm_stderr": 0.04943110704237101
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.5159642401021711,
"acc_stderr": 0.01787084750608173,
"acc_norm": 0.5159642401021711,
"acc_norm_stderr": 0.01787084750608173
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.3786127167630058,
"acc_stderr": 0.02611374936131034,
"acc_norm": 0.3786127167630058,
"acc_norm_stderr": 0.02611374936131034
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.25139664804469275,
"acc_stderr": 0.014508979453553988,
"acc_norm": 0.25139664804469275,
"acc_norm_stderr": 0.014508979453553988
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.48366013071895425,
"acc_stderr": 0.028614624752805407,
"acc_norm": 0.48366013071895425,
"acc_norm_stderr": 0.028614624752805407
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.41479099678456594,
"acc_stderr": 0.027982680459759553,
"acc_norm": 0.41479099678456594,
"acc_norm_stderr": 0.027982680459759553
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.4382716049382716,
"acc_stderr": 0.027607914087400483,
"acc_norm": 0.4382716049382716,
"acc_norm_stderr": 0.027607914087400483
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.28368794326241137,
"acc_stderr": 0.026891709428343968,
"acc_norm": 0.28368794326241137,
"acc_norm_stderr": 0.026891709428343968
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.3116036505867014,
"acc_stderr": 0.01182903918284965,
"acc_norm": 0.3116036505867014,
"acc_norm_stderr": 0.01182903918284965
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.29044117647058826,
"acc_stderr": 0.027576468622740522,
"acc_norm": 0.29044117647058826,
"acc_norm_stderr": 0.027576468622740522
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.3545751633986928,
"acc_stderr": 0.0193533605475537,
"acc_norm": 0.3545751633986928,
"acc_norm_stderr": 0.0193533605475537
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.4090909090909091,
"acc_stderr": 0.047093069786618966,
"acc_norm": 0.4090909090909091,
"acc_norm_stderr": 0.047093069786618966
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.4122448979591837,
"acc_stderr": 0.03151236044674281,
"acc_norm": 0.4122448979591837,
"acc_norm_stderr": 0.03151236044674281
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.472636815920398,
"acc_stderr": 0.035302355173346824,
"acc_norm": 0.472636815920398,
"acc_norm_stderr": 0.035302355173346824
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-virology|5": {
"acc": 0.3855421686746988,
"acc_stderr": 0.037891344246115496,
"acc_norm": 0.3855421686746988,
"acc_norm_stderr": 0.037891344246115496
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.03834234744164993,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.03834234744164993
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2913096695226438,
"mc1_stderr": 0.01590598704818483,
"mc2": 0.44689893094855493,
"mc2_stderr": 0.014502506509597363
},
"harness|winogrande|5": {
"acc": 0.6511444356748224,
"acc_stderr": 0.01339505932013732
},
"harness|gsm8k|5": {
"acc": 0.09249431387414708,
"acc_stderr": 0.007980396874560178
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
axelprsvl/my_dataset | ---
dataset_info:
features:
- name: audio
dtype: audio
splits:
- name: train
num_bytes: 40520175.0
num_examples: 5
download_size: 40474142
dataset_size: 40520175.0
---
# Dataset Card for "my_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
tyzhu/wikitext-2-raw-v1-shuffled | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 11018604
num_examples: 36718
- name: validation
num_bytes: 1159288
num_examples: 3760
download_size: 7055354
dataset_size: 12177892
---
# Dataset Card for "wikitext-2-raw-v1-shuffled"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ovior/twitter_dataset_1713165252 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 2753460
num_examples: 7970
download_size: 1590780
dataset_size: 2753460
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
guilhermebo/embeddings | ---
license: creativeml-openrail-m
---
|
telugu_news | ---
annotations_creators:
- machine-generated
language_creators:
- other
language:
- te
license:
- unknown
multilinguality:
- monolingual
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- text-generation
- fill-mask
- text-classification
task_ids:
- language-modeling
- masked-language-modeling
- multi-class-classification
- topic-classification
pretty_name: TeluguNews
dataset_info:
features:
- name: sno
dtype: int32
- name: date
dtype: string
- name: heading
dtype: string
- name: body
dtype: string
- name: topic
dtype:
class_label:
names:
'0': business
'1': editorial
'2': entertainment
'3': nation
'4': sports
splits:
- name: train
num_bytes: 69400234
num_examples: 17312
- name: test
num_bytes: 17265514
num_examples: 4329
download_size: 0
dataset_size: 86665748
---
# Dataset Card for [Dataset Name]
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://www.kaggle.com/sudalairajkumar/telugu-nlp?select=telugu_news
- **Repository:** https://github.com/AnushaMotamarri/Telugu-Newspaper-Article-Dataset
### Dataset Summary
This dataset contains Telugu language news articles along with their respective topic
labels (business, editorial, entertainment, nation, sports) extracted from the daily Andhra Jyoti.
This dataset could be used to build classification and language models.
### Supported Tasks and Leaderboards
Multiclass classification, Topic Classification, Language Model
### Languages
TE - Telugu, India
## Dataset Structure
### Data Instances
Two CSV files (train, test) with five columns (sno, date, heading, body, topic).
### Data Fields
- sno: id
- date: publish date of the news article
- heading: article heading/title
- body: article body/content
- topic: one of the following topics (business, editorial, entertainment, nation, sports)
### Data Splits
Train and Test
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
- https://www.kaggle.com/sudalairajkumar/telugu-nlp?select=telugu_news
- https://github.com/AnushaMotamarri/Telugu-Newspaper-Article-Dataset
#### Initial Data Collection and Normalization
The source data is scraped articles from archives of Telugu newspaper website Andhra Jyoti.
A set of queries was created and the corresponding ground truth answers were retrieved by a combination of BM25 and tf-idf.
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
Sudalai Rajkumar, Anusha Motamarri
### Licensing Information
[More Information Needed]
### Citation Information
```
@InProceedings{kaggle:dataset,
title = {Telugu News - Natural Language Processing for Indian Languages},
authors={Sudalai Rajkumar, Anusha Motamarri},
year={2019}
}
```
### Contributions
Thanks to [@oostopitre](https://github.com/oostopitre) for adding this dataset. |
income/trec-covid-top-20-gen-queries | ---
annotations_creators: []
language_creators: []
language:
- en
license:
- cc-by-sa-4.0
multilinguality:
- monolingual
paperswithcode_id: beir
pretty_name: BEIR Benchmark
size_categories:
msmarco:
- 1M<n<10M
trec-covid:
- 100k<n<1M
nfcorpus:
- 1K<n<10K
nq:
- 1M<n<10M
hotpotqa:
- 1M<n<10M
fiqa:
- 10K<n<100K
arguana:
- 1K<n<10K
touche-2020:
- 100K<n<1M
cqadupstack:
- 100K<n<1M
quora:
- 100K<n<1M
dbpedia:
- 1M<n<10M
scidocs:
- 10K<n<100K
fever:
- 1M<n<10M
climate-fever:
- 1M<n<10M
scifact:
- 1K<n<10K
source_datasets: []
task_categories:
- text-retrieval
---
# TREC-COVID: 20 generated queries (BEIR Benchmark)
This HF dataset contains the top-20 synthetic queries generated for each passage in the above BEIR benchmark dataset.
- DocT5query model used: [BeIR/query-gen-msmarco-t5-base-v1](https://huggingface.co/BeIR/query-gen-msmarco-t5-base-v1)
- id (str): unique document id in TREC-COVID in the BEIR benchmark (`corpus.jsonl`).
- Questions generated: 20
- Code used for generation: [evaluate_anserini_docT5query_parallel.py](https://github.com/beir-cellar/beir/blob/main/examples/retrieval/evaluation/sparse/evaluate_anserini_docT5query_parallel.py)
Below contains the old dataset card for the BEIR benchmark.
# Dataset Card for BEIR Benchmark
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://github.com/UKPLab/beir
- **Repository:** https://github.com/UKPLab/beir
- **Paper:** https://openreview.net/forum?id=wCu6T5xFjeJ
- **Leaderboard:** https://docs.google.com/spreadsheets/d/1L8aACyPaXrL8iEelJLGqlMqXKPX2oSP_R10pZoy77Ns
- **Point of Contact:** nandan.thakur@uwaterloo.ca
### Dataset Summary
BEIR is a heterogeneous benchmark that has been built from 18 diverse datasets representing 9 information retrieval tasks:
- Fact-checking: [FEVER](http://fever.ai), [Climate-FEVER](http://climatefever.ai), [SciFact](https://github.com/allenai/scifact)
- Question-Answering: [NQ](https://ai.google.com/research/NaturalQuestions), [HotpotQA](https://hotpotqa.github.io), [FiQA-2018](https://sites.google.com/view/fiqa/)
- Bio-Medical IR: [TREC-COVID](https://ir.nist.gov/covidSubmit/index.html), [BioASQ](http://bioasq.org), [NFCorpus](https://www.cl.uni-heidelberg.de/statnlpgroup/nfcorpus/)
- News Retrieval: [TREC-NEWS](https://trec.nist.gov/data/news2019.html), [Robust04](https://trec.nist.gov/data/robust/04.guidelines.html)
- Argument Retrieval: [Touche-2020](https://webis.de/events/touche-20/shared-task-1.html), [ArguAna](http://argumentation.bplaced.net/arguana/data)
- Duplicate Question Retrieval: [Quora](https://www.quora.com/q/quoradata/First-Quora-Dataset-Release-Question-Pairs), [CqaDupstack](http://nlp.cis.unimelb.edu.au/resources/cqadupstack/)
- Citation-Prediction: [SCIDOCS](https://allenai.org/data/scidocs)
- Tweet Retrieval: [Signal-1M](https://research.signal-ai.com/datasets/signal1m-tweetir.html)
- Entity Retrieval: [DBPedia](https://github.com/iai-group/DBpedia-Entity/)
All these datasets have been preprocessed and can be used for your experiments.
```python
```
### Supported Tasks and Leaderboards
The dataset supports a leaderboard that evaluates models against task-specific metrics such as F1 or EM, as well as their ability to retrieve supporting information from Wikipedia.
The current best performing models can be found [here](https://eval.ai/web/challenges/challenge-page/689/leaderboard/).
### Languages
All tasks are in English (`en`).
## Dataset Structure
All BEIR datasets must contain a corpus, queries and qrels (relevance judgments file). They must be in the following format:
- `corpus` file: a `.jsonl` file (jsonlines) that contains a list of dictionaries, each with three fields `_id` with unique document identifier, `title` with document title (optional) and `text` with document paragraph or passage. For example: `{"_id": "doc1", "title": "Albert Einstein", "text": "Albert Einstein was a German-born...."}`
- `queries` file: a `.jsonl` file (jsonlines) that contains a list of dictionaries, each with two fields `_id` with unique query identifier and `text` with query text. For example: `{"_id": "q1", "text": "Who developed the mass-energy equivalence formula?"}`
- `qrels` file: a `.tsv` file (tab-separated) that contains three columns, i.e. the `query-id`, `corpus-id` and `score` in this order. Keep 1st row as header. For example: `q1 doc1 1`
### Data Instances
A high level example of any beir dataset:
```python
corpus = {
"doc1" : {
"title": "Albert Einstein",
"text": "Albert Einstein was a German-born theoretical physicist. who developed the theory of relativity, \
one of the two pillars of modern physics (alongside quantum mechanics). His work is also known for \
its influence on the philosophy of science. He is best known to the general public for his mass–energy \
equivalence formula E = mc2, which has been dubbed 'the world's most famous equation'. He received the 1921 \
Nobel Prize in Physics 'for his services to theoretical physics, and especially for his discovery of the law \
of the photoelectric effect', a pivotal step in the development of quantum theory."
},
"doc2" : {
"title": "", # Keep title an empty string if not present
"text": "Wheat beer is a top-fermented beer which is brewed with a large proportion of wheat relative to the amount of \
malted barley. The two main varieties are German Weißbier and Belgian witbier; other types include Lambic (made\
with wild yeast), Berliner Weisse (a cloudy, sour beer), and Gose (a sour, salty beer)."
},
}
queries = {
"q1" : "Who developed the mass-energy equivalence formula?",
"q2" : "Which beer is brewed with a large proportion of wheat?"
}
qrels = {
"q1" : {"doc1": 1},
"q2" : {"doc2": 1},
}
```
### Data Fields
Examples from all configurations have the following features:
### Corpus
- `corpus`: a `dict` feature representing the document title and passage text, made up of:
- `_id`: a `string` feature representing the unique document id
- `title`: a `string` feature, denoting the title of the document.
- `text`: a `string` feature, denoting the text of the document.
### Queries
- `queries`: a `dict` feature representing the query, made up of:
- `_id`: a `string` feature representing the unique query id
- `text`: a `string` feature, denoting the text of the query.
### Qrels
- `qrels`: a `dict` feature representing the query document relevance judgements, made up of:
- `_id`: a `string` feature representing the query id
- `_id`: a `string` feature, denoting the document id.
- `score`: a `int32` feature, denoting the relevance judgement between query and document.
### Data Splits
| Dataset | Website| BEIR-Name | Type | Queries | Corpus | Rel D/Q | Down-load | md5 |
| -------- | -----| ---------| --------- | ----------- | ---------| ---------| :----------: | :------:|
| MSMARCO | [Homepage](https://microsoft.github.io/msmarco/)| ``msmarco`` | ``train``<br>``dev``<br>``test``| 6,980 | 8.84M | 1.1 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/msmarco.zip) | ``444067daf65d982533ea17ebd59501e4`` |
| TREC-COVID | [Homepage](https://ir.nist.gov/covidSubmit/index.html)| ``trec-covid``| ``test``| 50| 171K| 493.5 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/trec-covid.zip) | ``ce62140cb23feb9becf6270d0d1fe6d1`` |
| NFCorpus | [Homepage](https://www.cl.uni-heidelberg.de/statnlpgroup/nfcorpus/) | ``nfcorpus`` | ``train``<br>``dev``<br>``test``| 323 | 3.6K | 38.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/nfcorpus.zip) | ``a89dba18a62ef92f7d323ec890a0d38d`` |
| BioASQ | [Homepage](http://bioasq.org) | ``bioasq``| ``train``<br>``test`` | 500 | 14.91M | 8.05 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#2-bioasq) |
| NQ | [Homepage](https://ai.google.com/research/NaturalQuestions) | ``nq``| ``train``<br>``test``| 3,452 | 2.68M | 1.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/nq.zip) | ``d4d3d2e48787a744b6f6e691ff534307`` |
| HotpotQA | [Homepage](https://hotpotqa.github.io) | ``hotpotqa``| ``train``<br>``dev``<br>``test``| 7,405 | 5.23M | 2.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/hotpotqa.zip) | ``f412724f78b0d91183a0e86805e16114`` |
| FiQA-2018 | [Homepage](https://sites.google.com/view/fiqa/) | ``fiqa`` | ``train``<br>``dev``<br>``test``| 648 | 57K | 2.6 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/fiqa.zip) | ``17918ed23cd04fb15047f73e6c3bd9d9`` |
| Signal-1M(RT) | [Homepage](https://research.signal-ai.com/datasets/signal1m-tweetir.html)| ``signal1m`` | ``test``| 97 | 2.86M | 19.6 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#4-signal-1m) |
| TREC-NEWS | [Homepage](https://trec.nist.gov/data/news2019.html) | ``trec-news`` | ``test``| 57 | 595K | 19.6 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#1-trec-news) |
| ArguAna | [Homepage](http://argumentation.bplaced.net/arguana/data) | ``arguana``| ``test`` | 1,406 | 8.67K | 1.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/arguana.zip) | ``8ad3e3c2a5867cdced806d6503f29b99`` |
| Touche-2020| [Homepage](https://webis.de/events/touche-20/shared-task-1.html) | ``webis-touche2020``| ``test``| 49 | 382K | 19.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/webis-touche2020.zip) | ``46f650ba5a527fc69e0a6521c5a23563`` |
| CQADupstack| [Homepage](http://nlp.cis.unimelb.edu.au/resources/cqadupstack/) | ``cqadupstack``| ``test``| 13,145 | 457K | 1.4 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/cqadupstack.zip) | ``4e41456d7df8ee7760a7f866133bda78`` |
| Quora| [Homepage](https://www.quora.com/q/quoradata/First-Quora-Dataset-Release-Question-Pairs) | ``quora``| ``dev``<br>``test``| 10,000 | 523K | 1.6 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/quora.zip) | ``18fb154900ba42a600f84b839c173167`` |
| DBPedia | [Homepage](https://github.com/iai-group/DBpedia-Entity/) | ``dbpedia-entity``| ``dev``<br>``test``| 400 | 4.63M | 38.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/dbpedia-entity.zip) | ``c2a39eb420a3164af735795df012ac2c`` |
| SCIDOCS| [Homepage](https://allenai.org/data/scidocs) | ``scidocs``| ``test``| 1,000 | 25K | 4.9 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/scidocs.zip) | ``38121350fc3a4d2f48850f6aff52e4a9`` |
| FEVER | [Homepage](http://fever.ai) | ``fever``| ``train``<br>``dev``<br>``test``| 6,666 | 5.42M | 1.2| [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/fever.zip) | ``5a818580227bfb4b35bb6fa46d9b6c03`` |
| Climate-FEVER| [Homepage](http://climatefever.ai) | ``climate-fever``|``test``| 1,535 | 5.42M | 3.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/climate-fever.zip) | ``8b66f0a9126c521bae2bde127b4dc99d`` |
| SciFact| [Homepage](https://github.com/allenai/scifact) | ``scifact``| ``train``<br>``test``| 300 | 5K | 1.1 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/scifact.zip) | ``5f7d1de60b170fc8027bb7898e2efca1`` |
| Robust04 | [Homepage](https://trec.nist.gov/data/robust/04.guidelines.html) | ``robust04``| ``test``| 249 | 528K | 69.9 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#3-robust04) |
## Dataset Creation
### Curation Rationale
[Needs More Information]
### Source Data
#### Initial Data Collection and Normalization
[Needs More Information]
#### Who are the source language producers?
[Needs More Information]
### Annotations
#### Annotation process
[Needs More Information]
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
[Needs More Information]
## Considerations for Using the Data
### Social Impact of Dataset
[Needs More Information]
### Discussion of Biases
[Needs More Information]
### Other Known Limitations
[Needs More Information]
## Additional Information
### Dataset Curators
[Needs More Information]
### Licensing Information
[Needs More Information]
### Citation Information
Cite as:
```
@inproceedings{
thakur2021beir,
title={{BEIR}: A Heterogeneous Benchmark for Zero-shot Evaluation of Information Retrieval Models},
author={Nandan Thakur and Nils Reimers and Andreas R{\"u}ckl{\'e} and Abhishek Srivastava and Iryna Gurevych},
booktitle={Thirty-fifth Conference on Neural Information Processing Systems Datasets and Benchmarks Track (Round 2)},
year={2021},
url={https://openreview.net/forum?id=wCu6T5xFjeJ}
}
```
### Contributions
Thanks to [@Nthakur20](https://github.com/Nthakur20) for adding this dataset.

Top-20 generated queries for every passage in TREC-COVID
# Dataset Card for BEIR Benchmark
## Table of Contents
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://github.com/UKPLab/beir
- **Repository:** https://github.com/UKPLab/beir
- **Paper:** https://openreview.net/forum?id=wCu6T5xFjeJ
- **Leaderboard:** https://docs.google.com/spreadsheets/d/1L8aACyPaXrL8iEelJLGqlMqXKPX2oSP_R10pZoy77Ns
- **Point of Contact:** nandan.thakur@uwaterloo.ca
### Dataset Summary
BEIR is a heterogeneous benchmark that has been built from 18 diverse datasets representing 9 information retrieval tasks:
- Fact-checking: [FEVER](http://fever.ai), [Climate-FEVER](http://climatefever.ai), [SciFact](https://github.com/allenai/scifact)
- Question-Answering: [NQ](https://ai.google.com/research/NaturalQuestions), [HotpotQA](https://hotpotqa.github.io), [FiQA-2018](https://sites.google.com/view/fiqa/)
- Bio-Medical IR: [TREC-COVID](https://ir.nist.gov/covidSubmit/index.html), [BioASQ](http://bioasq.org), [NFCorpus](https://www.cl.uni-heidelberg.de/statnlpgroup/nfcorpus/)
- News Retrieval: [TREC-NEWS](https://trec.nist.gov/data/news2019.html), [Robust04](https://trec.nist.gov/data/robust/04.guidelines.html)
- Argument Retrieval: [Touche-2020](https://webis.de/events/touche-20/shared-task-1.html), [ArguAna](http://argumentation.bplaced.net/arguana/data)
- Duplicate Question Retrieval: [Quora](https://www.quora.com/q/quoradata/First-Quora-Dataset-Release-Question-Pairs), [CqaDupstack](http://nlp.cis.unimelb.edu.au/resources/cqadupstack/)
- Citation-Prediction: [SCIDOCS](https://allenai.org/data/scidocs)
- Tweet Retrieval: [Signal-1M](https://research.signal-ai.com/datasets/signal1m-tweetir.html)
- Entity Retrieval: [DBPedia](https://github.com/iai-group/DBpedia-Entity/)
All these datasets have been preprocessed and can be used for your experiments.
```python
```
### Supported Tasks and Leaderboards
The dataset supports a leaderboard that evaluates models against task-specific metrics such as F1 or EM, as well as their ability to retrieve supporting information from Wikipedia.
The current best performing models can be found [here](https://eval.ai/web/challenges/challenge-page/689/leaderboard/).
### Languages
All tasks are in English (`en`).
## Dataset Structure
All BEIR datasets must contain a corpus, queries and qrels (relevance judgments file). They must be in the following format:
- `corpus` file: a `.jsonl` file (jsonlines) that contains a list of dictionaries, each with three fields `_id` with unique document identifier, `title` with document title (optional) and `text` with document paragraph or passage. For example: `{"_id": "doc1", "title": "Albert Einstein", "text": "Albert Einstein was a German-born...."}`
- `queries` file: a `.jsonl` file (jsonlines) that contains a list of dictionaries, each with two fields `_id` with unique query identifier and `text` with query text. For example: `{"_id": "q1", "text": "Who developed the mass-energy equivalence formula?"}`
- `qrels` file: a `.tsv` file (tab-separated) that contains three columns, i.e. the `query-id`, `corpus-id` and `score` in this order. Keep 1st row as header. For example: `q1 doc1 1`
### Data Instances
A high level example of any beir dataset:
```python
corpus = {
"doc1" : {
"title": "Albert Einstein",
"text": "Albert Einstein was a German-born theoretical physicist. who developed the theory of relativity, \
one of the two pillars of modern physics (alongside quantum mechanics). His work is also known for \
its influence on the philosophy of science. He is best known to the general public for his mass–energy \
equivalence formula E = mc2, which has been dubbed 'the world's most famous equation'. He received the 1921 \
Nobel Prize in Physics 'for his services to theoretical physics, and especially for his discovery of the law \
of the photoelectric effect', a pivotal step in the development of quantum theory."
},
"doc2" : {
"title": "", # Keep title an empty string if not present
"text": "Wheat beer is a top-fermented beer which is brewed with a large proportion of wheat relative to the amount of \
malted barley. The two main varieties are German Weißbier and Belgian witbier; other types include Lambic (made\
with wild yeast), Berliner Weisse (a cloudy, sour beer), and Gose (a sour, salty beer)."
},
}
queries = {
"q1" : "Who developed the mass-energy equivalence formula?",
"q2" : "Which beer is brewed with a large proportion of wheat?"
}
qrels = {
"q1" : {"doc1": 1},
"q2" : {"doc2": 1},
}
```
### Data Fields
Examples from all configurations have the following features:
### Corpus
- `corpus`: a `dict` feature representing the document title and passage text, made up of:
- `_id`: a `string` feature representing the unique document id
- `title`: a `string` feature, denoting the title of the document.
- `text`: a `string` feature, denoting the text of the document.
### Queries
- `queries`: a `dict` feature representing the query, made up of:
- `_id`: a `string` feature representing the unique query id
- `text`: a `string` feature, denoting the text of the query.
### Qrels
- `qrels`: a `dict` feature representing the query document relevance judgements, made up of:
- `_id`: a `string` feature representing the query id
- `_id`: a `string` feature, denoting the document id.
- `score`: a `int32` feature, denoting the relevance judgement between query and document.
### Data Splits
| Dataset | Website| BEIR-Name | Type | Queries | Corpus | Rel D/Q | Down-load | md5 |
| -------- | -----| ---------| --------- | ----------- | ---------| ---------| :----------: | :------:|
| MSMARCO | [Homepage](https://microsoft.github.io/msmarco/)| ``msmarco`` | ``train``<br>``dev``<br>``test``| 6,980 | 8.84M | 1.1 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/msmarco.zip) | ``444067daf65d982533ea17ebd59501e4`` |
| TREC-COVID | [Homepage](https://ir.nist.gov/covidSubmit/index.html)| ``trec-covid``| ``test``| 50| 171K| 493.5 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/trec-covid.zip) | ``ce62140cb23feb9becf6270d0d1fe6d1`` |
| NFCorpus | [Homepage](https://www.cl.uni-heidelberg.de/statnlpgroup/nfcorpus/) | ``nfcorpus`` | ``train``<br>``dev``<br>``test``| 323 | 3.6K | 38.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/nfcorpus.zip) | ``a89dba18a62ef92f7d323ec890a0d38d`` |
| BioASQ | [Homepage](http://bioasq.org) | ``bioasq``| ``train``<br>``test`` | 500 | 14.91M | 8.05 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#2-bioasq) |
| NQ | [Homepage](https://ai.google.com/research/NaturalQuestions) | ``nq``| ``train``<br>``test``| 3,452 | 2.68M | 1.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/nq.zip) | ``d4d3d2e48787a744b6f6e691ff534307`` |
| HotpotQA | [Homepage](https://hotpotqa.github.io) | ``hotpotqa``| ``train``<br>``dev``<br>``test``| 7,405 | 5.23M | 2.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/hotpotqa.zip) | ``f412724f78b0d91183a0e86805e16114`` |
| FiQA-2018 | [Homepage](https://sites.google.com/view/fiqa/) | ``fiqa`` | ``train``<br>``dev``<br>``test``| 648 | 57K | 2.6 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/fiqa.zip) | ``17918ed23cd04fb15047f73e6c3bd9d9`` |
| Signal-1M(RT) | [Homepage](https://research.signal-ai.com/datasets/signal1m-tweetir.html)| ``signal1m`` | ``test``| 97 | 2.86M | 19.6 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#4-signal-1m) |
| TREC-NEWS | [Homepage](https://trec.nist.gov/data/news2019.html) | ``trec-news`` | ``test``| 57 | 595K | 19.6 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#1-trec-news) |
| ArguAna | [Homepage](http://argumentation.bplaced.net/arguana/data) | ``arguana``| ``test`` | 1,406 | 8.67K | 1.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/arguana.zip) | ``8ad3e3c2a5867cdced806d6503f29b99`` |
| Touche-2020| [Homepage](https://webis.de/events/touche-20/shared-task-1.html) | ``webis-touche2020``| ``test``| 49 | 382K | 19.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/webis-touche2020.zip) | ``46f650ba5a527fc69e0a6521c5a23563`` |
| CQADupstack| [Homepage](http://nlp.cis.unimelb.edu.au/resources/cqadupstack/) | ``cqadupstack``| ``test``| 13,145 | 457K | 1.4 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/cqadupstack.zip) | ``4e41456d7df8ee7760a7f866133bda78`` |
| Quora| [Homepage](https://www.quora.com/q/quoradata/First-Quora-Dataset-Release-Question-Pairs) | ``quora``| ``dev``<br>``test``| 10,000 | 523K | 1.6 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/quora.zip) | ``18fb154900ba42a600f84b839c173167`` |
| DBPedia | [Homepage](https://github.com/iai-group/DBpedia-Entity/) | ``dbpedia-entity``| ``dev``<br>``test``| 400 | 4.63M | 38.2 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/dbpedia-entity.zip) | ``c2a39eb420a3164af735795df012ac2c`` |
| SCIDOCS| [Homepage](https://allenai.org/data/scidocs) | ``scidocs``| ``test``| 1,000 | 25K | 4.9 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/scidocs.zip) | ``38121350fc3a4d2f48850f6aff52e4a9`` |
| FEVER | [Homepage](http://fever.ai) | ``fever``| ``train``<br>``dev``<br>``test``| 6,666 | 5.42M | 1.2| [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/fever.zip) | ``5a818580227bfb4b35bb6fa46d9b6c03`` |
| Climate-FEVER| [Homepage](http://climatefever.ai) | ``climate-fever``|``test``| 1,535 | 5.42M | 3.0 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/climate-fever.zip) | ``8b66f0a9126c521bae2bde127b4dc99d`` |
| SciFact| [Homepage](https://github.com/allenai/scifact) | ``scifact``| ``train``<br>``test``| 300 | 5K | 1.1 | [Link](https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/scifact.zip) | ``5f7d1de60b170fc8027bb7898e2efca1`` |
| Robust04 | [Homepage](https://trec.nist.gov/data/robust/04.guidelines.html) | ``robust04``| ``test``| 249 | 528K | 69.9 | No | [How to Reproduce?](https://github.com/UKPLab/beir/blob/main/examples/dataset#3-robust04) |
## Dataset Creation
### Curation Rationale
[Needs More Information]
### Source Data
#### Initial Data Collection and Normalization
[Needs More Information]
#### Who are the source language producers?
[Needs More Information]
### Annotations
#### Annotation process
[Needs More Information]
#### Who are the annotators?
[Needs More Information]
### Personal and Sensitive Information
[Needs More Information]
## Considerations for Using the Data
### Social Impact of Dataset
[Needs More Information]
### Discussion of Biases
[Needs More Information]
### Other Known Limitations
[Needs More Information]
## Additional Information
### Dataset Curators
[Needs More Information]
### Licensing Information
[Needs More Information]
### Citation Information
Cite as:
```
@inproceedings{
thakur2021beir,
title={{BEIR}: A Heterogeneous Benchmark for Zero-shot Evaluation of Information Retrieval Models},
author={Nandan Thakur and Nils Reimers and Andreas R{\"u}ckl{\'e} and Abhishek Srivastava and Iryna Gurevych},
booktitle={Thirty-fifth Conference on Neural Information Processing Systems Datasets and Benchmarks Track (Round 2)},
year={2021},
url={https://openreview.net/forum?id=wCu6T5xFjeJ}
}
```
### Contributions
Thanks to [@Nthakur20](https://github.com/Nthakur20) for adding this dataset. |
open-llm-leaderboard/details_chargoddard__mistral-11b-slimorca | ---
pretty_name: Evaluation run of chargoddard/mistral-11b-slimorca
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [chargoddard/mistral-11b-slimorca](https://huggingface.co/chargoddard/mistral-11b-slimorca)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chargoddard__mistral-11b-slimorca\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-01-08T07:32:00.985160](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__mistral-11b-slimorca/blob/main/results_2024-01-08T07-32-00.985160.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6367892367382408,\n\
\ \"acc_stderr\": 0.032420458743968754,\n \"acc_norm\": 0.6401126581266797,\n\
\ \"acc_norm_stderr\": 0.03306588858456459,\n \"mc1\": 0.38310893512851896,\n\
\ \"mc1_stderr\": 0.017018461679389855,\n \"mc2\": 0.5466386336115909,\n\
\ \"mc2_stderr\": 0.015507674046261742\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6075085324232082,\n \"acc_stderr\": 0.01426963463567073,\n\
\ \"acc_norm\": 0.6424914675767918,\n \"acc_norm_stderr\": 0.014005494275916573\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6468830910177256,\n\
\ \"acc_stderr\": 0.004769618829196511,\n \"acc_norm\": 0.8380800637323242,\n\
\ \"acc_norm_stderr\": 0.0036762448867232586\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n\
\ \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n\
\ \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n\
\ \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n\
\ \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \
\ \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n\
\ \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\
\ \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n\
\ \"acc_norm_stderr\": 0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \
\ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n\
\ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n\
\ \"acc_stderr\": 0.0372424959581773,\n \"acc_norm\": 0.6069364161849711,\n\
\ \"acc_norm_stderr\": 0.0372424959581773\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n\
\ \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n\
\ \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n\
\ \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n\
\ \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n\
\ \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894443,\n \"\
acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.02533120243894443\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n\
\ \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n\
\ \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n\
\ \"acc_stderr\": 0.023904914311782658,\n \"acc_norm\": 0.7709677419354839,\n\
\ \"acc_norm_stderr\": 0.023904914311782658\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n\
\ \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\"\
: 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n\
\ \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.8232323232323232,\n \"acc_stderr\": 0.027178752639044915,\n \"\
acc_norm\": 0.8232323232323232,\n \"acc_norm_stderr\": 0.027178752639044915\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n\
\ \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563973,\n\
\ \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060846,\n \
\ \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060846\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n\
\ \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"\
acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"\
acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"\
acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8333333333333334,\n \"acc_stderr\": 0.026156867523931045,\n \"\
acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.026156867523931045\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676177,\n \
\ \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676177\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n\
\ \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n\
\ \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n\
\ \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"\
acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n\
\ \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n\
\ \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n\
\ \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n\
\ \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \
\ \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\
\ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n\
\ \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n\
\ \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \
\ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n\
\ \"acc_stderr\": 0.013428186370608311,\n \"acc_norm\": 0.8301404853128991,\n\
\ \"acc_norm_stderr\": 0.013428186370608311\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.024257901705323378,\n\
\ \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.024257901705323378\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3541899441340782,\n\
\ \"acc_stderr\": 0.015995644947299232,\n \"acc_norm\": 0.3541899441340782,\n\
\ \"acc_norm_stderr\": 0.015995644947299232\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02526169121972948,\n\
\ \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02526169121972948\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n\
\ \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n\
\ \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n\
\ \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \
\ \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4530638852672751,\n\
\ \"acc_stderr\": 0.012713845972358981,\n \"acc_norm\": 0.4530638852672751,\n\
\ \"acc_norm_stderr\": 0.012713845972358981\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6691176470588235,\n \"acc_stderr\": 0.02858270975389845,\n\
\ \"acc_norm\": 0.6691176470588235,\n \"acc_norm_stderr\": 0.02858270975389845\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6715686274509803,\n \"acc_stderr\": 0.01899970738316267,\n \
\ \"acc_norm\": 0.6715686274509803,\n \"acc_norm_stderr\": 0.01899970738316267\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n\
\ \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n\
\ \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n\
\ \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\
\ \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n\
\ \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \
\ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n\
\ \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n\
\ \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n\
\ \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38310893512851896,\n\
\ \"mc1_stderr\": 0.017018461679389855,\n \"mc2\": 0.5466386336115909,\n\
\ \"mc2_stderr\": 0.015507674046261742\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7797947908445146,\n \"acc_stderr\": 0.011646276755089688\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5238817285822593,\n \
\ \"acc_stderr\": 0.013756765835465755\n }\n}\n```"
repo_url: https://huggingface.co/chargoddard/mistral-11b-slimorca
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|arc:challenge|25_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|gsm8k|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hellaswag|10_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-08T07-32-00.985160.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-08T07-32-00.985160.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- '**/details_harness|winogrande|5_2024-01-08T07-32-00.985160.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-01-08T07-32-00.985160.parquet'
- config_name: results
data_files:
- split: 2024_01_08T07_32_00.985160
path:
- results_2024-01-08T07-32-00.985160.parquet
- split: latest
path:
- results_2024-01-08T07-32-00.985160.parquet
---
# Dataset Card for Evaluation run of chargoddard/mistral-11b-slimorca
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [chargoddard/mistral-11b-slimorca](https://huggingface.co/chargoddard/mistral-11b-slimorca) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_chargoddard__mistral-11b-slimorca",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-01-08T07:32:00.985160](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__mistral-11b-slimorca/blob/main/results_2024-01-08T07-32-00.985160.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find the results for each task in its "results" configuration and in its "latest" split):
```python
{
"all": {
"acc": 0.6367892367382408,
"acc_stderr": 0.032420458743968754,
"acc_norm": 0.6401126581266797,
"acc_norm_stderr": 0.03306588858456459,
"mc1": 0.38310893512851896,
"mc1_stderr": 0.017018461679389855,
"mc2": 0.5466386336115909,
"mc2_stderr": 0.015507674046261742
},
"harness|arc:challenge|25": {
"acc": 0.6075085324232082,
"acc_stderr": 0.01426963463567073,
"acc_norm": 0.6424914675767918,
"acc_norm_stderr": 0.014005494275916573
},
"harness|hellaswag|10": {
"acc": 0.6468830910177256,
"acc_stderr": 0.004769618829196511,
"acc_norm": 0.8380800637323242,
"acc_norm_stderr": 0.0036762448867232586
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411021,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411021
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6148148148148148,
"acc_stderr": 0.04203921040156279,
"acc_norm": 0.6148148148148148,
"acc_norm_stderr": 0.04203921040156279
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6776315789473685,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.6776315789473685,
"acc_norm_stderr": 0.03803510248351585
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.690566037735849,
"acc_stderr": 0.028450154794118637,
"acc_norm": 0.690566037735849,
"acc_norm_stderr": 0.028450154794118637
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.037455547914624555,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.037455547914624555
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.52,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.52,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6069364161849711,
"acc_stderr": 0.0372424959581773,
"acc_norm": 0.6069364161849711,
"acc_norm_stderr": 0.0372424959581773
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.04913595201274498,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.04913595201274498
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5702127659574469,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.5702127659574469,
"acc_norm_stderr": 0.03236214467715564
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.4824561403508772,
"acc_stderr": 0.04700708033551038,
"acc_norm": 0.4824561403508772,
"acc_norm_stderr": 0.04700708033551038
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5310344827586206,
"acc_stderr": 0.04158632762097828,
"acc_norm": 0.5310344827586206,
"acc_norm_stderr": 0.04158632762097828
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41005291005291006,
"acc_stderr": 0.02533120243894443,
"acc_norm": 0.41005291005291006,
"acc_norm_stderr": 0.02533120243894443
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677172,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677172
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7709677419354839,
"acc_stderr": 0.023904914311782658,
"acc_norm": 0.7709677419354839,
"acc_norm_stderr": 0.023904914311782658
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5073891625615764,
"acc_stderr": 0.035176035403610105,
"acc_norm": 0.5073891625615764,
"acc_norm_stderr": 0.035176035403610105
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.67,
"acc_stderr": 0.04725815626252609,
"acc_norm": 0.67,
"acc_norm_stderr": 0.04725815626252609
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7515151515151515,
"acc_stderr": 0.033744026441394036,
"acc_norm": 0.7515151515151515,
"acc_norm_stderr": 0.033744026441394036
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8232323232323232,
"acc_stderr": 0.027178752639044915,
"acc_norm": 0.8232323232323232,
"acc_norm_stderr": 0.027178752639044915
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919443,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919443
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6641025641025641,
"acc_stderr": 0.023946724741563973,
"acc_norm": 0.6641025641025641,
"acc_norm_stderr": 0.023946724741563973
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.31851851851851853,
"acc_stderr": 0.02840653309060846,
"acc_norm": 0.31851851851851853,
"acc_norm_stderr": 0.02840653309060846
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3509933774834437,
"acc_stderr": 0.03896981964257375,
"acc_norm": 0.3509933774834437,
"acc_norm_stderr": 0.03896981964257375
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8477064220183487,
"acc_stderr": 0.015405084393157074,
"acc_norm": 0.8477064220183487,
"acc_norm_stderr": 0.015405084393157074
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5046296296296297,
"acc_stderr": 0.03409825519163572,
"acc_norm": 0.5046296296296297,
"acc_norm_stderr": 0.03409825519163572
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8333333333333334,
"acc_stderr": 0.026156867523931045,
"acc_norm": 0.8333333333333334,
"acc_norm_stderr": 0.026156867523931045
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7848101265822784,
"acc_stderr": 0.026750826994676177,
"acc_norm": 0.7848101265822784,
"acc_norm_stderr": 0.026750826994676177
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7709923664122137,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.7709923664122137,
"acc_norm_stderr": 0.036853466317118506
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7768595041322314,
"acc_stderr": 0.03800754475228733,
"acc_norm": 0.7768595041322314,
"acc_norm_stderr": 0.03800754475228733
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8055555555555556,
"acc_stderr": 0.038260763248848646,
"acc_norm": 0.8055555555555556,
"acc_norm_stderr": 0.038260763248848646
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7668711656441718,
"acc_stderr": 0.0332201579577674,
"acc_norm": 0.7668711656441718,
"acc_norm_stderr": 0.0332201579577674
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4375,
"acc_stderr": 0.04708567521880525,
"acc_norm": 0.4375,
"acc_norm_stderr": 0.04708567521880525
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8547008547008547,
"acc_stderr": 0.0230866350868414,
"acc_norm": 0.8547008547008547,
"acc_norm_stderr": 0.0230866350868414
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8301404853128991,
"acc_stderr": 0.013428186370608311,
"acc_norm": 0.8301404853128991,
"acc_norm_stderr": 0.013428186370608311
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7167630057803468,
"acc_stderr": 0.024257901705323378,
"acc_norm": 0.7167630057803468,
"acc_norm_stderr": 0.024257901705323378
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.3541899441340782,
"acc_stderr": 0.015995644947299232,
"acc_norm": 0.3541899441340782,
"acc_norm_stderr": 0.015995644947299232
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7352941176470589,
"acc_stderr": 0.02526169121972948,
"acc_norm": 0.7352941176470589,
"acc_norm_stderr": 0.02526169121972948
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6816720257234726,
"acc_stderr": 0.026457225067811025,
"acc_norm": 0.6816720257234726,
"acc_norm_stderr": 0.026457225067811025
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7345679012345679,
"acc_stderr": 0.024569223600460845,
"acc_norm": 0.7345679012345679,
"acc_norm_stderr": 0.024569223600460845
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4787234042553192,
"acc_stderr": 0.029800481645628693,
"acc_norm": 0.4787234042553192,
"acc_norm_stderr": 0.029800481645628693
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4530638852672751,
"acc_stderr": 0.012713845972358981,
"acc_norm": 0.4530638852672751,
"acc_norm_stderr": 0.012713845972358981
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6691176470588235,
"acc_stderr": 0.02858270975389845,
"acc_norm": 0.6691176470588235,
"acc_norm_stderr": 0.02858270975389845
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6715686274509803,
"acc_stderr": 0.01899970738316267,
"acc_norm": 0.6715686274509803,
"acc_norm_stderr": 0.01899970738316267
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6454545454545455,
"acc_stderr": 0.045820048415054174,
"acc_norm": 0.6454545454545455,
"acc_norm_stderr": 0.045820048415054174
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.726530612244898,
"acc_stderr": 0.028535560337128445,
"acc_norm": 0.726530612244898,
"acc_norm_stderr": 0.028535560337128445
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.026193923544454125,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.026193923544454125
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774708,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774708
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5180722891566265,
"acc_stderr": 0.03889951252827216,
"acc_norm": 0.5180722891566265,
"acc_norm_stderr": 0.03889951252827216
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.38310893512851896,
"mc1_stderr": 0.017018461679389855,
"mc2": 0.5466386336115909,
"mc2_stderr": 0.015507674046261742
},
"harness|winogrande|5": {
"acc": 0.7797947908445146,
"acc_stderr": 0.011646276755089688
},
"harness|gsm8k|5": {
"acc": 0.5238817285822593,
"acc_stderr": 0.013756765835465755
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
CyberHarem/colorado_azurlane | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of colorado/コロラド/科罗拉多 (Azur Lane)
This is the dataset of colorado/コロラド/科罗拉多 (Azur Lane), containing 120 images and their tags.
The core tags of this character are `long_hair, red_eyes, white_hair, breasts, hairband, hair_over_one_eye, large_breasts, bangs, red_hairband`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 120 | 153.85 MiB | [Download](https://huggingface.co/datasets/CyberHarem/colorado_azurlane/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 120 | 82.39 MiB | [Download](https://huggingface.co/datasets/CyberHarem/colorado_azurlane/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 302 | 181.15 MiB | [Download](https://huggingface.co/datasets/CyberHarem/colorado_azurlane/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 120 | 135.42 MiB | [Download](https://huggingface.co/datasets/CyberHarem/colorado_azurlane/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 302 | 265.98 MiB | [Download](https://huggingface.co/datasets/CyberHarem/colorado_azurlane/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/colorado_azurlane',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering results; some outfits may be minable from these clusters.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 15 |  |  |  |  |  | 1girl, bare_shoulders, looking_at_viewer, solo, detached_sleeves, sideboob, sleeveless_dress, belt, coat |
| 1 | 7 |  |  |  |  |  | 1girl, sideboob, sleeveless_dress, solo, thigh_boots, thighhighs, detached_sleeves, lace-up_boots, looking_at_viewer, bare_shoulders, coat, simple_background, white_background, belt |
| 2 | 13 |  |  |  |  |  | 1girl, official_alternate_costume, solo, black_jacket, guitar_case, black_hairband, braid, white_sweater, scarf_over_mouth, white_scarf, looking_at_viewer, disposable_cup, holding_cup, open_clothes, black_thighhighs, simple_background, white_background, bird_on_head |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | bare_shoulders | looking_at_viewer | solo | detached_sleeves | sideboob | sleeveless_dress | belt | coat | thigh_boots | thighhighs | lace-up_boots | simple_background | white_background | official_alternate_costume | black_jacket | guitar_case | black_hairband | braid | white_sweater | scarf_over_mouth | white_scarf | disposable_cup | holding_cup | open_clothes | black_thighhighs | bird_on_head |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-----------------|:--------------------|:-------|:-------------------|:-----------|:-------------------|:-------|:-------|:--------------|:-------------|:----------------|:--------------------|:-------------------|:-----------------------------|:---------------|:--------------|:-----------------|:--------|:----------------|:-------------------|:--------------|:-----------------|:--------------|:---------------|:-------------------|:---------------|
| 0 | 15 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | |
| 1 | 7 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | |
| 2 | 13 |  |  |  |  |  | X | | X | X | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
|
Reasat/kather-19_test | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': ADI
'1': BACK
'2': DEB
'3': LYM
'4': MUC
'5': MUS
'6': NORM
'7': STR
'8': TUM
splits:
- name: train
num_bytes: 1093018719.36
num_examples: 7180
download_size: 941913399
dataset_size: 1093018719.36
---
# Dataset Card for "kather-19_test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
MinusV25/arxiv-1000-sample | ---
license: apache-2.0
---
|
LucasMagnana/ARASAAC_CACE | ---
dataset_info:
features:
- name: text
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 15417
num_examples: 803
download_size: 13416
dataset_size: 15417
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
JaydeepPatel/gujarati-alpaca52K | ---
license: cc-by-nc-4.0
---
|
PlayerJ/iOneBot_Custom_llama2 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 7527
num_examples: 19
download_size: 3528
dataset_size: 7527
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
AnushaKulkarni/preferred_dataset | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: chosen
struct:
- name: rank
dtype: int32
- name: response
dtype: string
- name: rejected
struct:
- name: rank
dtype: int32
- name: response
dtype: string
splits:
- name: train
num_bytes: 33413
num_examples: 50
download_size: 29993
dataset_size: 33413
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
mattlc/deepgui | ---
dataset_info:
features:
- name: file_name
dtype: image
- name: caption
dtype: string
- name: conditioning_image
dtype: image
splits:
- name: train
num_bytes: 184059306.0
num_examples: 400
download_size: 85178148
dataset_size: 184059306.0
---
# Dataset Card for "deepgui_v2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
DBQ/Loro.Piana.Product.prices.France | ---
annotations_creators:
- other
language_creators:
- other
language:
- en
license:
- unknown
multilinguality:
- monolingual
source_datasets:
- original
task_categories:
- text-classification
- image-classification
- feature-extraction
- image-segmentation
- image-to-image
- image-to-text
- object-detection
- summarization
- zero-shot-image-classification
pretty_name: France - Loro Piana - Product-level price list
tags:
- webscraping
- ecommerce
- Loro Piana
- fashion
- fashion product
- image
- fashion image
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: website_name
dtype: string
- name: competence_date
dtype: string
- name: country_code
dtype: string
- name: currency_code
dtype: string
- name: brand
dtype: string
- name: category1_code
dtype: string
- name: category2_code
dtype: string
- name: category3_code
dtype: string
- name: product_code
dtype: string
- name: title
dtype: string
- name: itemurl
dtype: string
- name: imageurl
dtype: string
- name: full_price
dtype: float64
- name: price
dtype: float64
- name: full_price_eur
dtype: float64
- name: price_eur
dtype: float64
- name: flg_discount
dtype: int64
splits:
- name: train
num_bytes: 389433
num_examples: 1128
download_size: 131337
dataset_size: 389433
---
# Loro Piana web scraped data
## About the website
In the European, Middle Eastern and African (EMEA) region, specifically France, **Loro Piana** operates within the **luxury fashion industry**. This sector is renowned for its fine craftsmanship, superior quality, and the high value attached to its products. France, in particular, has long been a global center of luxury fashion with its capital, Paris, often referred to as the "fashion capital of the world". The industry is continuously evolving, incorporating **Ecommerce** and online platforms into their business models to meet the changing demands and preferences of its upscale clientele. In this context, we observed a dataset consisting of **Ecommerce product-list page (PLP) data** on Loro Piana in France.
## Link to **dataset**
[France - Loro Piana - Product-level price list dataset](https://www.databoutique.com/buy-data-page/Loro%20Piana%20Product-prices%20France/r/recyuT3Im5gEtayt6)
|
RikoteMaster/isear_augmented | ---
dataset_info:
features:
- name: Text_processed
dtype: string
- name: Emotion
dtype: string
- name: Augmented
dtype: bool
splits:
- name: train
num_bytes: 1130723
num_examples: 7499
- name: validation
num_bytes: 198379
num_examples: 1324
- name: test
num_bytes: 250735
num_examples: 1879
download_size: 923389
dataset_size: 1579837
---
# Dataset Card for "isear_augmented"
Classical ISEAR dataset augmented using GPT-3.5 prompt tuning.
Example prompt:
Hello, you are going to take care of the task of increasing data in text. The data format that I am going to pass you is going to be as follows. Sentence: this is a sample text PROHIBITED WORD: this is a sample PROHIBITED WORD 2. Sentence: this is a sample text PROHIBITED WORD: this is a sample PROHIBITED WORD. I can enter as many sentences as I want, you must respect the logic that I have marked. NOW YOUR TASK MUST BE TO REFORMULATE THE SENTENCES IN ORDER TO EXPRESS THE SENTIMENT OF THE PROHIBITED WORD BUT YOU CANT USE THE PROHIBITED WORD BECAUSE IS FORBIDEN. PROHIBITED WORD LIST = [anger, fear, love, sadness, guilt, joy, shame, Overwhelming, remorse] you cannot use none of this words in the reformulation process, also you cannot use words derivated from this words and you must not do aclarations about what the text is trying to transmit. The output of the reformulated sentences must be Reformulated sentence 1: LorenIpsum reformulated sentence 2: LorenIpsum reformulated. Remember, I can introduce more than two sentences so you must return the reformulation of each sentence to me. So, remember, you must get a sentence that prevails the sentiment called the sentence PROHIBITED WORD but it is more important that the word does not appear inside the reformulation. If the word appears within the reformulation we would be entering into an incorrect practice of data augmentation. Yann LeCunn is watching, your bosses are watching you too, you must do what I ask you to do and get me the best sentences possible, but remember, without using the PROHIBITED WORD in the reformulation.1. Sentece: Unexpected visit by a close friend, whom I hadn't seen for half a
year. PROHIBITED WORD: joy
2. Sentece: I wandered by mistake into the safety zone of a shooting range,
and was shot at. PROHIBITED WORD: fear
3. Sentece: Being treated unfairly. PROHIBITED WORD: anger
4. Sentece: Breaking up with a girl. PROHIBITED WORD: sadness
5. Sentece: Nothing. PROHIBITED WORD: disgust
6. Sentece: None. PROHIBITED WORD: shame
7. Sentece: Little contact with my father before he died. PROHIBITED WORD: guilt
8. Sentece: When I was accepted as a student at the college, not having
thought it possible. PROHIBITED WORD: joy
The results obtained were quite good, but once GPT finds a way to express a sentiment with particular words, it tends to repeat that same structure — so you must restructure the prompt periodically.
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_cgato__Thespis-CurtainCall-7b-v0.3 | ---
pretty_name: Evaluation run of cgato/Thespis-CurtainCall-7b-v0.3
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [cgato/Thespis-CurtainCall-7b-v0.3](https://huggingface.co/cgato/Thespis-CurtainCall-7b-v0.3)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cgato__Thespis-CurtainCall-7b-v0.3\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-14T19:10:45.068833](https://huggingface.co/datasets/open-llm-leaderboard/details_cgato__Thespis-CurtainCall-7b-v0.3/blob/main/results_2024-03-14T19-10-45.068833.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6209088671220813,\n\
\ \"acc_stderr\": 0.032670238010700646,\n \"acc_norm\": 0.626293015631346,\n\
\ \"acc_norm_stderr\": 0.033333675763785604,\n \"mc1\": 0.35006119951040393,\n\
\ \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5095336916736214,\n\
\ \"mc2_stderr\": 0.015077729128899325\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6032423208191127,\n \"acc_stderr\": 0.014296513020180644,\n\
\ \"acc_norm\": 0.6424914675767918,\n \"acc_norm_stderr\": 0.014005494275916573\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6328420633339972,\n\
\ \"acc_stderr\": 0.004810449343572395,\n \"acc_norm\": 0.8293168691495718,\n\
\ \"acc_norm_stderr\": 0.003754629313275157\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n\
\ \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n\
\ \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.038234289699266046,\n\
\ \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.038234289699266046\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n\
\ \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \
\ \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322666,\n\
\ \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322666\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6944444444444444,\n\
\ \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.6944444444444444,\n\
\ \"acc_norm_stderr\": 0.03852084696008534\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n\
\ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\
\ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\
\ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201942,\n\
\ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201942\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n\
\ \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5361702127659574,\n \"acc_stderr\": 0.032600385118357715,\n\
\ \"acc_norm\": 0.5361702127659574,\n \"acc_norm_stderr\": 0.032600385118357715\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n\
\ \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246483,\n \"\
acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246483\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n\
\ \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.4126984126984127,\n\
\ \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7354838709677419,\n\
\ \"acc_stderr\": 0.02509189237885928,\n \"acc_norm\": 0.7354838709677419,\n\
\ \"acc_norm_stderr\": 0.02509189237885928\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n\
\ \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.65,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\"\
: 0.65,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n\
\ \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7878787878787878,\n \"acc_stderr\": 0.02912652283458682,\n \"\
acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.02912652283458682\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153303,\n\
\ \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153303\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6410256410256411,\n \"acc_stderr\": 0.02432173848460235,\n \
\ \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.02432173848460235\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \
\ \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.031041941304059285,\n\
\ \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.031041941304059285\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\
: 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\
\ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8146788990825689,\n\
\ \"acc_stderr\": 0.016659279700295827,\n \"acc_norm\": 0.8146788990825689,\n\
\ \"acc_norm_stderr\": 0.016659279700295827\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\
: {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538272,\n\
\ \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538272\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"\
acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \
\ \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\
\ \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n\
\ \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n\
\ \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098823,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098823\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n\
\ \"acc_stderr\": 0.03957835471980979,\n \"acc_norm\": 0.7870370370370371,\n\
\ \"acc_norm_stderr\": 0.03957835471980979\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n\
\ \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\
\ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\
\ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\
\ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n\
\ \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n\
\ \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8148148148148148,\n\
\ \"acc_stderr\": 0.013890862162876166,\n \"acc_norm\": 0.8148148148148148,\n\
\ \"acc_norm_stderr\": 0.013890862162876166\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0246853168672578,\n\
\ \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0246853168672578\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.33854748603351953,\n\
\ \"acc_stderr\": 0.01582670009648135,\n \"acc_norm\": 0.33854748603351953,\n\
\ \"acc_norm_stderr\": 0.01582670009648135\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6895424836601307,\n \"acc_stderr\": 0.026493033225145898,\n\
\ \"acc_norm\": 0.6895424836601307,\n \"acc_norm_stderr\": 0.026493033225145898\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n\
\ \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n\
\ \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7098765432098766,\n \"acc_stderr\": 0.025251173936495033,\n\
\ \"acc_norm\": 0.7098765432098766,\n \"acc_norm_stderr\": 0.025251173936495033\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \
\ \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4439374185136897,\n\
\ \"acc_stderr\": 0.012689708167787686,\n \"acc_norm\": 0.4439374185136897,\n\
\ \"acc_norm_stderr\": 0.012689708167787686\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.625,\n \"acc_stderr\": 0.029408372932278746,\n \
\ \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.029408372932278746\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \
\ \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291296,\n\
\ \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291296\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\
\ \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n\
\ \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \
\ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\
\ \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n\
\ \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n\
\ \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35006119951040393,\n\
\ \"mc1_stderr\": 0.01669794942015103,\n \"mc2\": 0.5095336916736214,\n\
\ \"mc2_stderr\": 0.015077729128899325\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7861089187056038,\n \"acc_stderr\": 0.011524466954090248\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3737680060652009,\n \
\ \"acc_stderr\": 0.013326342860737021\n }\n}\n```"
repo_url: https://huggingface.co/cgato/Thespis-CurtainCall-7b-v0.3
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|arc:challenge|25_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|gsm8k|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hellaswag|10_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-14T19-10-45.068833.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-14T19-10-45.068833.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- '**/details_harness|winogrande|5_2024-03-14T19-10-45.068833.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-14T19-10-45.068833.parquet'
- config_name: results
data_files:
- split: 2024_03_14T19_10_45.068833
path:
- results_2024-03-14T19-10-45.068833.parquet
- split: latest
path:
- results_2024-03-14T19-10-45.068833.parquet
---
# Dataset Card for Evaluation run of cgato/Thespis-CurtainCall-7b-v0.3
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [cgato/Thespis-CurtainCall-7b-v0.3](https://huggingface.co/cgato/Thespis-CurtainCall-7b-v0.3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_cgato__Thespis-CurtainCall-7b-v0.3",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-03-14T19:10:45.068833](https://huggingface.co/datasets/open-llm-leaderboard/details_cgato__Thespis-CurtainCall-7b-v0.3/blob/main/results_2024-03-14T19-10-45.068833.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6209088671220813,
"acc_stderr": 0.032670238010700646,
"acc_norm": 0.626293015631346,
"acc_norm_stderr": 0.033333675763785604,
"mc1": 0.35006119951040393,
"mc1_stderr": 0.01669794942015103,
"mc2": 0.5095336916736214,
"mc2_stderr": 0.015077729128899325
},
"harness|arc:challenge|25": {
"acc": 0.6032423208191127,
"acc_stderr": 0.014296513020180644,
"acc_norm": 0.6424914675767918,
"acc_norm_stderr": 0.014005494275916573
},
"harness|hellaswag|10": {
"acc": 0.6328420633339972,
"acc_stderr": 0.004810449343572395,
"acc_norm": 0.8293168691495718,
"acc_norm_stderr": 0.003754629313275157
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5777777777777777,
"acc_stderr": 0.04266763404099582,
"acc_norm": 0.5777777777777777,
"acc_norm_stderr": 0.04266763404099582
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6710526315789473,
"acc_stderr": 0.038234289699266046,
"acc_norm": 0.6710526315789473,
"acc_norm_stderr": 0.038234289699266046
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.57,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.57,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6943396226415094,
"acc_stderr": 0.028353298073322666,
"acc_norm": 0.6943396226415094,
"acc_norm_stderr": 0.028353298073322666
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6944444444444444,
"acc_stderr": 0.03852084696008534,
"acc_norm": 0.6944444444444444,
"acc_norm_stderr": 0.03852084696008534
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04690650298201942,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04690650298201942
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932263,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932263
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5361702127659574,
"acc_stderr": 0.032600385118357715,
"acc_norm": 0.5361702127659574,
"acc_norm_stderr": 0.032600385118357715
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5517241379310345,
"acc_stderr": 0.04144311810878152,
"acc_norm": 0.5517241379310345,
"acc_norm_stderr": 0.04144311810878152
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.025197101074246483,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.025197101074246483
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4126984126984127,
"acc_stderr": 0.04403438954768176,
"acc_norm": 0.4126984126984127,
"acc_norm_stderr": 0.04403438954768176
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7354838709677419,
"acc_stderr": 0.02509189237885928,
"acc_norm": 0.7354838709677419,
"acc_norm_stderr": 0.02509189237885928
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.65,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.65,
"acc_norm_stderr": 0.04793724854411019
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7696969696969697,
"acc_stderr": 0.0328766675860349,
"acc_norm": 0.7696969696969697,
"acc_norm_stderr": 0.0328766675860349
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7878787878787878,
"acc_stderr": 0.02912652283458682,
"acc_norm": 0.7878787878787878,
"acc_norm_stderr": 0.02912652283458682
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.844559585492228,
"acc_stderr": 0.026148483469153303,
"acc_norm": 0.844559585492228,
"acc_norm_stderr": 0.026148483469153303
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6410256410256411,
"acc_stderr": 0.02432173848460235,
"acc_norm": 0.6410256410256411,
"acc_norm_stderr": 0.02432173848460235
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815632,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815632
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6470588235294118,
"acc_stderr": 0.031041941304059285,
"acc_norm": 0.6470588235294118,
"acc_norm_stderr": 0.031041941304059285
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.304635761589404,
"acc_stderr": 0.03757949922943343,
"acc_norm": 0.304635761589404,
"acc_norm_stderr": 0.03757949922943343
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8146788990825689,
"acc_stderr": 0.016659279700295827,
"acc_norm": 0.8146788990825689,
"acc_norm_stderr": 0.016659279700295827
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.46296296296296297,
"acc_stderr": 0.03400603625538272,
"acc_norm": 0.46296296296296297,
"acc_norm_stderr": 0.03400603625538272
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7990196078431373,
"acc_stderr": 0.028125972265654373,
"acc_norm": 0.7990196078431373,
"acc_norm_stderr": 0.028125972265654373
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7890295358649789,
"acc_stderr": 0.02655837250266192,
"acc_norm": 0.7890295358649789,
"acc_norm_stderr": 0.02655837250266192
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6905829596412556,
"acc_stderr": 0.03102441174057221,
"acc_norm": 0.6905829596412556,
"acc_norm_stderr": 0.03102441174057221
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7404580152671756,
"acc_stderr": 0.03844876139785271,
"acc_norm": 0.7404580152671756,
"acc_norm_stderr": 0.03844876139785271
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098823,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098823
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7870370370370371,
"acc_stderr": 0.03957835471980979,
"acc_norm": 0.7870370370370371,
"acc_norm_stderr": 0.03957835471980979
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7423312883435583,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.7423312883435583,
"acc_norm_stderr": 0.03436150827846917
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.7281553398058253,
"acc_stderr": 0.044052680241409216,
"acc_norm": 0.7281553398058253,
"acc_norm_stderr": 0.044052680241409216
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.022801382534597528,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.022801382534597528
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8148148148148148,
"acc_stderr": 0.013890862162876166,
"acc_norm": 0.8148148148148148,
"acc_norm_stderr": 0.013890862162876166
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6994219653179191,
"acc_stderr": 0.0246853168672578,
"acc_norm": 0.6994219653179191,
"acc_norm_stderr": 0.0246853168672578
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.33854748603351953,
"acc_stderr": 0.01582670009648135,
"acc_norm": 0.33854748603351953,
"acc_norm_stderr": 0.01582670009648135
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6895424836601307,
"acc_stderr": 0.026493033225145898,
"acc_norm": 0.6895424836601307,
"acc_norm_stderr": 0.026493033225145898
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7041800643086816,
"acc_stderr": 0.025922371788818763,
"acc_norm": 0.7041800643086816,
"acc_norm_stderr": 0.025922371788818763
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7098765432098766,
"acc_stderr": 0.025251173936495033,
"acc_norm": 0.7098765432098766,
"acc_norm_stderr": 0.025251173936495033
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4929078014184397,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.4929078014184397,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4439374185136897,
"acc_stderr": 0.012689708167787686,
"acc_norm": 0.4439374185136897,
"acc_norm_stderr": 0.012689708167787686
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.625,
"acc_stderr": 0.029408372932278746,
"acc_norm": 0.625,
"acc_norm_stderr": 0.029408372932278746
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6535947712418301,
"acc_stderr": 0.01924978569171721,
"acc_norm": 0.6535947712418301,
"acc_norm_stderr": 0.01924978569171721
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7183673469387755,
"acc_stderr": 0.028795185574291296,
"acc_norm": 0.7183673469387755,
"acc_norm_stderr": 0.028795185574291296
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.026193923544454125,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.026193923544454125
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5120481927710844,
"acc_stderr": 0.03891364495835817,
"acc_norm": 0.5120481927710844,
"acc_norm_stderr": 0.03891364495835817
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8070175438596491,
"acc_stderr": 0.030267457554898458,
"acc_norm": 0.8070175438596491,
"acc_norm_stderr": 0.030267457554898458
},
"harness|truthfulqa:mc|0": {
"mc1": 0.35006119951040393,
"mc1_stderr": 0.01669794942015103,
"mc2": 0.5095336916736214,
"mc2_stderr": 0.015077729128899325
},
"harness|winogrande|5": {
"acc": 0.7861089187056038,
"acc_stderr": 0.011524466954090248
},
"harness|gsm8k|5": {
"acc": 0.3737680060652009,
"acc_stderr": 0.013326342860737021
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
sirCamp/hotpot_as2_pairs_and_triplets | ---
dataset_info:
features:
- name: texts
sequence: string
splits:
- name: pairs
num_bytes: 34545896
num_examples: 117828
- name: triplets
num_bytes: 34545896
num_examples: 117828
download_size: 32977152
dataset_size: 69091792
---
# Dataset Card for "hotpot_as2_pairs_and_triplets"
This dataset is a modified version of HotpotAS2 from [lucadiliello/hotpot_as2](https://huggingface.co/datasets/lucadiliello/hotpot_as2), designed for training and pre-training passage-retrieval models.
The dataset is divided into two splits:
+ *pairs* split: Each instance in this split consists of a <u>question-(positive)answer pair</u>.
+ *triplets* split: Each instance in this split consists of a <u>question-(positive)answer-(hard-negative)answer triplet</u>.
For more information, see the original HotpotAS2 dataset and its construction details in this [repository](https://github.com/lucadiliello/answer-selection).
beratcmn/parse-gpt-tr | ---
language:
- tr
license: apache-2.0
task_categories:
- question-answering
- text2text-generation
pretty_name: Parse GPT TR
dataset_info:
features:
- name: id
dtype: string
- name: model
dtype: string
- name: messages
list:
- name: index
dtype: int64
- name: role
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 31282
num_examples: 14
download_size: 21619
dataset_size: 31282
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# ParseGPT Turkish
This is a Turkish dataset gathered from various individuals via the [ChatGPT Conversation Parser by beratcmn](https://huggingface.co/spaces/beratcmn/chatgpt-conversation-parser).
This dataset gets updated every time someone uses the ChatGPT Conversation Parser to parse a conversation.
This dataset contains conversations from various individuals. The conversations are in Turkish and are in the form of a list of messages. Each message has an index, a role (either "user" or "assistant"), and the text of the message.
# Contact
If you have any questions, please contact beratcmn on [HuggingFace](https://huggingface.co/beratcmn).
Website:
[beratcimen.com](https://beratcimen.com/)
Email:
beratcmn@hotmail.com
|
sagot/lefff_morpho | ---
license: lgpl-lr
---
# Dataset Card for lefff morpho
## Dataset Description
- **Homepage:** [http://almanach.inria.fr/software_and_resources/custom/Alexina-en.html](http://almanach.inria.fr/software_and_resources/custom/Alexina-en.html)
- **Repository:** [https://gitlab.inria.fr/almanach/alexina/lefff](https://gitlab.inria.fr/almanach/alexina/lefff)
- **Paper:** [http://www.lrec-conf.org/proceedings/lrec2010/pdf/701_Paper.pdf](http://www.lrec-conf.org/proceedings/lrec2010/pdf/701_Paper.pdf)
- **Point of Contact:** [Benoît Sagot](benoit.sagot@inria.fr)
### Dataset Summary
The Lefff, currently in its 3.5 version, is one of the main morphological and syntactic lexicons for French. This Hugging Face dataset provides an easy access to the extensional morphological information in the Lefff, i.e. to the 4-uples (form, lemma, category, morphosyntactic features) and to the amalgams (e.g. _aux_ = _à_ + _les_) it contains. Category and morphosyntactic features are provided both in the original Lefff format and following the UniMorph guidelines.
### Languages
French
## Dataset Creation
The main author of the resource is Benoît Sagot (Inria, France).
Please refer to the main paper and other Lefff-related papers for details.
## Additional Information
### Licensing Information
The dataset, as the whole Lefff, is distributed under the LGPL-LR licence.
### Citation Information
The main paper regarding the Lefff can be found [here](https://aclanthology.org/L10-1487/). Here is the BibTeX entry for the paper:
```
@inproceedings{sagot:inria-00521242,
TITLE = {{The Lefff, a freely available and large-coverage morphological and syntactic lexicon for French}},
AUTHOR = {Sagot, Beno{\^i}t},
URL = {https://hal.inria.fr/inria-00521242},
BOOKTITLE = {{7th international conference on Language Resources and Evaluation (LREC 2010)}},
ADDRESS = {Valletta, Malta},
YEAR = {2010},
MONTH = May,
PDF = {https://hal.inria.fr/inria-00521242/file/lrec10lefff.pdf},
HAL_ID = {inria-00521242},
HAL_VERSION = {v1},
}
```
For specific parts of speech or other parts of the lexicon, please cite the corresponding papers whenever relevant.
|
hlillemark/flores200_eng_output_scaffolding_mt5 | ---
dataset_info:
features:
- name: id
dtype: int32
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
- name: labels
sequence: int64
splits:
- name: train
num_bytes: 10718199156
num_examples: 10240000
- name: val
num_bytes: 3827042
num_examples: 5000
- name: test
num_bytes: 7670994
num_examples: 10000
download_size: 4669652168
dataset_size: 10729697192
---
# Dataset Card for "flores200_eng_output_scaffolding_mt5"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
joey234/mmlu-us_foreign_policy | ---
dataset_info:
features:
- name: question
dtype: string
- name: choices
sequence: string
- name: answer
dtype:
class_label:
names:
'0': A
'1': B
'2': C
'3': D
- name: negate_openai_prompt
struct:
- name: content
dtype: string
- name: role
dtype: string
- name: neg_question
dtype: string
- name: fewshot_context
dtype: string
- name: fewshot_context_neg
dtype: string
splits:
- name: dev
num_bytes: 4594
num_examples: 5
- name: test
num_bytes: 407390
num_examples: 100
download_size: 70800
dataset_size: 411984
configs:
- config_name: default
data_files:
- split: dev
path: data/dev-*
- split: test
path: data/test-*
---
# Dataset Card for "mmlu-us_foreign_policy"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
fuyu-quant/ibl-regression-ver2-linear-pred | ---
dataset_info:
features:
- name: prediction
dtype: string
- name: 'true'
dtype: string
- name: index
dtype: int64
splits:
- name: pred
num_bytes: 96424
num_examples: 1000
download_size: 24459
dataset_size: 96424
configs:
- config_name: default
data_files:
- split: pred
path: data/pred-*
---
|
CyberHarem/shikinami_kantaicollection | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of shikinami/敷波/敷波 (Kantai Collection)
This is the dataset of shikinami/敷波/敷波 (Kantai Collection), containing 500 images and their tags.
The core tags of this character are `brown_hair, ponytail, brown_eyes, short_hair, ribbon, hair_ribbon`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:----------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 398.10 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shikinami_kantaicollection/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 266.04 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shikinami_kantaicollection/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1057 | 529.52 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shikinami_kantaicollection/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 367.15 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shikinami_kantaicollection/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1057 | 700.33 MiB | [Download](https://huggingface.co/datasets/CyberHarem/shikinami_kantaicollection/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/shikinami_kantaicollection',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 13 |  |  |  |  |  | 1girl, brown_sailor_collar, serafuku, solo, looking_at_viewer, blush, short_sleeves, brown_skirt, pleated_skirt, simple_background, upper_body, white_background, twitter_username |
| 1 | 5 |  |  |  |  |  | 1girl, black_socks, brown_sailor_collar, brown_skirt, kneehighs, pleated_skirt, serafuku, simple_background, solo, white_background, looking_at_viewer, blush, short_sleeves, sitting, open_mouth |
| 2 | 7 |  |  |  |  |  | 1girl, black_sailor_collar, black_skirt, black_socks, serafuku, solo, anchor_symbol, kneehighs, pleated_skirt, looking_at_viewer, white_background, short_sleeves, simple_background, wariza |
| 3 | 9 |  |  |  |  |  | 1girl, black_socks, full_body, kneehighs, machinery, pleated_skirt, serafuku, simple_background, solo, smokestack, white_background, black_sailor_collar, black_skirt, looking_at_viewer, torpedo_launcher, adapted_turret, standing, cannon, rigging, short_sleeves, short_ponytail |
| 4 | 5 |  |  |  |  |  | 1girl, black_socks, brown_sailor_collar, brown_skirt, grey_footwear, kneehighs, pleated_skirt, serafuku, solo, full_body, short_sleeves, anchor_symbol, shoes, blush, open_mouth, outdoors, smile, standing |
| 5 | 5 |  |  |  |  |  | anchor_symbol, black_sailor_collar, kneehighs, pleated_skirt, serafuku, short_sleeves, solo_focus, black_skirt, black_socks, 2girls, long_hair, standing |
| 6 | 7 |  |  |  |  |  | 1girl, black_pantyhose, detached_collar, playboy_bunny, rabbit_ears, small_breasts, solo, strapless_leotard, fake_animal_ears, simple_background, wrist_cuffs, alternate_costume, looking_at_viewer, black_leotard, full_body, grey_background, red_bowtie, red_leotard, sitting, white_background |
| 7 | 9 |  |  |  |  |  | 1girl, solo, cowboy_shot, looking_at_viewer, collarbone, black_one-piece_swimsuit, blue_one-piece_swimsuit, covered_navel, school_swimsuit, small_breasts, standing, blush, gradient_background |
| 8 | 11 |  |  |  |  |  | 1girl, alternate_costume, obi, blush, solo, yukata, looking_at_viewer, upper_body, uchiwa, floral_print |
| 9 | 9 |  |  |  |  |  | 1girl, white_apron, black_dress, enmaided, solo, blush, maid_apron, maid_headdress, simple_background, frilled_apron, looking_at_viewer, open_mouth, puffy_sleeves, short_sleeves, white_background |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | brown_sailor_collar | serafuku | solo | looking_at_viewer | blush | short_sleeves | brown_skirt | pleated_skirt | simple_background | upper_body | white_background | twitter_username | black_socks | kneehighs | sitting | open_mouth | black_sailor_collar | black_skirt | anchor_symbol | wariza | full_body | machinery | smokestack | torpedo_launcher | adapted_turret | standing | cannon | rigging | short_ponytail | grey_footwear | shoes | outdoors | smile | solo_focus | 2girls | long_hair | black_pantyhose | detached_collar | playboy_bunny | rabbit_ears | small_breasts | strapless_leotard | fake_animal_ears | wrist_cuffs | alternate_costume | black_leotard | grey_background | red_bowtie | red_leotard | cowboy_shot | collarbone | black_one-piece_swimsuit | blue_one-piece_swimsuit | covered_navel | school_swimsuit | gradient_background | obi | yukata | uchiwa | floral_print | white_apron | black_dress | enmaided | maid_apron | maid_headdress | frilled_apron | puffy_sleeves |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:----------------------|:-----------|:-------|:--------------------|:--------|:----------------|:--------------|:----------------|:--------------------|:-------------|:-------------------|:-------------------|:--------------|:------------|:----------|:-------------|:----------------------|:--------------|:----------------|:---------|:------------|:------------|:-------------|:-------------------|:-----------------|:-----------|:---------|:----------|:-----------------|:----------------|:--------|:-----------|:--------|:-------------|:---------|:------------|:------------------|:------------------|:----------------|:--------------|:----------------|:--------------------|:-------------------|:--------------|:--------------------|:----------------|:------------------|:-------------|:--------------|:--------------|:-------------|:---------------------------|:--------------------------|:----------------|:------------------|:----------------------|:------|:---------|:---------|:---------------|:--------------|:--------------|:-----------|:-------------|:-----------------|:----------------|:----------------|
| 0 | 13 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 1 | 5 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | | X | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 2 | 7 |  |  |  |  |  | X | | X | X | X | | X | | X | X | | X | | X | X | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 3 | 9 |  |  |  |  |  | X | | X | X | X | | X | | X | X | | X | | X | X | | | X | X | | | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 4 | 5 |  |  |  |  |  | X | X | X | X | | X | X | X | X | | | | | X | X | | X | | | X | | X | | | | | X | | | | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 5 | 5 |  |  |  |  |  | | | X | | | | X | | X | | | | | X | X | | | X | X | X | | | | | | | X | | | | | | | | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 6 | 7 |  |  |  |  |  | X | | | X | X | | | | | X | | X | | | | X | | | | | | X | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | |
| 7 | 9 |  |  |  |  |  | X | | | X | X | X | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | X | | | | | | | | | X | X | X | X | X | X | X | | | | | | | | | | | |
| 8 | 11 |  |  |  |  |  | X | | | X | X | X | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | X | X | X | X | | | | | | | |
| 9 | 9 |  |  |  |  |  | X | | | X | X | X | X | | | X | | X | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X |
|
zuleo/karen-fukuhara | ---
license: creativeml-openrail-m
tags:
- stable-diffusion
- embedding
- textual-inversion
- text-to-image
- image-to-image
- art
- artistic
---
# Karen Fukuhara textual inversion
This is an embedding of Karen Fukuhara. She has played several amazing roles across acting and voice acting, including Kimiko Miyashiro, Glimmer, and Kipo.

## Embedding Usage
Use the token ```kfukvf-1990```

---
## 🎶 Prompt Examples
🧾 ```Perfectly-centered portrait-photograph of kfukvf-1990, dressed as a queen with glimmering jewelry, lifelike, subsurface scattering, photorealism, 8k resolution, beautiful, dynamic lighting```
⛔ Negative prompt: ```(bad_prompt_version2:0.8), lowres, text, error, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, ((duplicate)), ((morbid)), ((mutilated)), out of frame, (((mutation))), (((deformed))), ((bad anatomy)), (((bad proportions))), ((extra limbs)), cloned face, (((disfigured))), extra limbs, gross proportions, (malformed limbs), ((missing arms)), ((missing legs)), (((extra arms))), (((extra legs))), (fused fingers), (((long neck)))```
_Steps: 20, Sampler: DDIM, CFG scale: 7, Seed: 2154893269, Size: 512x768, Model hash: d8691b4d16_
---
🧾 ```Perfectly-centered portrait-photograph of kfukvf-1990, sitting near a table having a drink, lifelike, subsurface scattering, photorealism, 8k resolution, beautiful```
⛔ Negative prompt: ```(bad_prompt_version2:0.8), lowres, text, error, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, ((duplicate)), ((morbid)), ((mutilated)), out of frame, (((mutation))), (((deformed))), ((bad anatomy)), (((bad proportions))), ((extra limbs)), cloned face, (((disfigured))), extra limbs, gross proportions, (malformed limbs), ((missing arms)), ((missing legs)), (((extra arms))), (((extra legs))), (fused fingers), (((long neck)))```
_Steps: 20, Sampler: DPM++ SDE Karras, CFG scale: 7, Seed: 2899587651, Size: 512x768, Model hash: e3cda540bf_
---
🧾 ```Perfectly-centered portrait-photograph of kfukvf-1990, wearing a dirty jacket near a busy city, lifelike, subsurface scattering, photorealism, 8k resolution, beautiful```
⛔ Negative prompt: ```(bad_prompt_version2:0.8), lowres, text, error, cropped, worst quality, low quality, normal quality, jpeg artifacts, signature, watermark, username, blurry, ((duplicate)), ((morbid)), ((mutilated)), out of frame, (((mutation))), (((deformed))), ((bad anatomy)), (((bad proportions))), ((extra limbs)), cloned face, (((disfigured))), extra limbs, gross proportions, (malformed limbs), ((missing arms)), ((missing legs)), (((extra arms))), (((extra legs))), (fused fingers), (((long neck)))```
_Steps: 20, Sampler: DPM++ SDE Karras, CFG scale: 7, Seed: 3576359901, Size: 512x768, Model hash: 67abd65708_


---
## 🎴 text2img Sampler and Checkpoint grids:
It's always helpful to see how this embedding behaves across different samplers and checkpoint models. See the examples below and tune them to your liking.
[Sampling Grid](https://huggingface.co/datasets/zuleo/karen-fukuhara/resolve/main/images/sampler_ckpt_grid.png)
---
☕ If you enjoy this model, buy me a coffee [Buy a coffee](https://ko-fi.com/3eegames)
--- |
CaliD3f/garbagepailkids | ---
license: openrail
---
|
Nadav/pixel_glue_cola | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': '0'
'1': '1'
splits:
- name: train
num_bytes: 50427044.125
num_examples: 8551
- name: validation
num_bytes: 6217509.625
num_examples: 1043
download_size: 46779514
dataset_size: 56644553.75
---
# Dataset Card for "pixel_glue_cola"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
maxidl/LeoLM-ArcChallenge_de-fixed | ---
dataset_info:
features:
- name: id
dtype: string
- name: question
dtype: string
- name: choices
struct:
- name: text
sequence: string
- name: label
sequence: string
- name: answerKey
dtype: string
- name: question_de
dtype: string
- name: choices_de
struct:
- name: label
sequence: string
- name: text
sequence: string
- name: translation_de
dtype: string
splits:
- name: test
num_bytes: 1170650
num_examples: 1172
- name: validation
num_bytes: 301780
num_examples: 299
download_size: 807886
dataset_size: 1472430
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
- split: validation
path: data/validation-*
---
The [LeoLM/ArcChallenge_de](https://huggingface.co/datasets/LeoLM/ArcChallenge_de) dataset, but with label errors fixed.
The fix applied:
```
import datasets as hfds
ds = hfds.load_dataset("LeoLM/ArcChallenge_de")
def check_label(row):
choices_en = row["choices"]
label_en = choices_en["label"]
choices_de = row["choices_de"]
label_de = choices_de["label"]
if label_en != label_de:
print(f"\nLabel mismatch:\n{row}\n{label_en}\n{label_de}")
choices_de["label"] = label_en
return {"choices": choices_en, "choices_de": choices_de}
ds = ds.map(lambda row: check_label(row))
ds.push_to_hub("maxidl/LeoLM-ArcChallenge_de-fixed")
```
Output of running this:
```
Label mismatch:
{'id': 'NYSEDREGENTS_2014_4_4', 'question': 'Which state of matter has no definite volume and no definite shape?', 'choices': {'text': ['gas', 'liquid', 'solid'], 'label': ['A', 'B', 'C']}, 'answerKey': 'A', 'question_de': 'Welcher Aggregatzustand hat kein bestimmtes Volumen und keine bestimmte Form?', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['Gas', 'Flüssigkeit', 'Feststoff']}, 'translation_de': '{"question": "Welcher Aggregatzustand hat kein bestimmtes Volumen und keine bestimmte Form?", "choices": ["Gas", "Flüssigkeit", "Feststoff"]}'}
['A', 'B', 'C']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'TIMSS_2003_8_pg29', 'question': 'Which of the following organs is NOT situated in the abdomen?', 'choices': {'text': ['liver', 'kidney', 'stomach', 'bladder', 'heart'], 'label': ['A', 'B', 'C', 'D', 'E']}, 'answerKey': 'E', 'question_de': 'Welches der folgenden Organe ist NICHT im Bauchraum?', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['Leber', 'Niere', 'Magen', 'Blase', 'Herz']}, 'translation_de': '{"question": "Welches der folgenden Organe ist NICHT im Bauchraum?", "choices": ["Leber", "Niere", "Magen", "Blase", "Herz"]}'}
['A', 'B', 'C', 'D', 'E']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_4_19', 'question': 'As kittens grow into cats, their body weight usually', 'choices': {'text': ['decreases', 'increases', 'remains the same'], 'label': ['A', 'B', 'C']}, 'answerKey': 'B', 'question_de': 'Wenn Kätzchen zu Katzen heranwachsen, nimmt ihr Körpergewicht normalerweise', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['ab', 'zu', 'bleibt gleich']}, 'translation_de': '{"question": "Wenn Kätzchen zu Katzen heranwachsen, nimmt ihr Körpergewicht normalerweise", "choices": ["ab", "zu", "bleibt gleich"]}'}
['A', 'B', 'C']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_8_12', 'question': "A main function of a plant's seed is to", 'choices': {'text': ['store food to be used during early development', 'attract pollen to be used during development', 'take in light energy to be used during photosynthesis', 'produce chlorophyll to be used during photosynthesis'], 'label': ['1', '2', '3', '4']}, 'answerKey': '1', 'question_de': 'Eine Hauptfunktion des Samens einer Pflanze ist es,', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['Nahrung zu speichern, die während der frühen Entwicklung verwendet wird', 'Pollen anzulocken, der während der Entwicklung verwendet wird', 'Lichtenergie aufzunehmen, die während der Photosynthese verwendet wird', 'Chlorophyll zu produzieren, das während der Photosynthese verwendet wird']}, 'translation_de': '{"question": "Eine Hauptfunktion des Samens einer Pflanze ist es,", "choices": ["Nahrung zu speichern, die während der frühen Entwicklung verwendet wird", "Pollen anzulocken, der während der Entwicklung verwendet wird", "Lichtenergie aufzunehmen, die während der Photosynthese verwendet wird", "Chlorophyll zu produzieren, das während der Photosynthese verwendet wird"]}'}
['1', '2', '3', '4']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_4_28', 'question': 'Large birds have been eating small animals in an area. If all of the large birds died from a disease, the number of small animals in the area would probably', 'choices': {'text': ['decrease', 'increase', 'remain the same'], 'label': ['A', 'B', 'C']}, 'answerKey': 'B', 'question_de': 'Große Vögel haben kleine Tiere in einem Gebiet gefressen. Wenn alle großen Vögel an einer Krankheit sterben würden, würde die Anzahl der kleinen Tiere in der Gegend wahrscheinlich', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['abnehmen', 'zunehmen', 'gleich bleiben']}, 'translation_de': '{"question": "Große Vögel haben kleine Tiere in einem Gebiet gefressen. Wenn alle großen Vögel an einer Krankheit sterben würden, würde die Anzahl der kleinen Tiere in der Gegend wahrscheinlich", "choices": ["abnehmen", "zunehmen", "gleich bleiben"]}'}
['A', 'B', 'C']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_8_38', 'question': 'A substance in the solid phase (state) of matter has', 'choices': {'text': ['a definite shape and a definite volume', 'a definite shape, but no definite volume', 'no definite shape, but a definite volume', 'no definite shape and no definite volume'], 'label': ['1', '2', '3', '4']}, 'answerKey': '1', 'question_de': 'Ein Stoff im festen Aggregatzustand hat', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['eine bestimmte Form und ein bestimmtes Volumen', 'eine bestimmte Form, aber kein bestimmtes Volumen', 'keine bestimmte Form, aber ein bestimmtes Volumen', 'weder eine bestimmte Form noch ein bestimmtes Volumen']}, 'translation_de': '{"question": "Ein Stoff im festen Aggregatzustand hat", "choices": ["eine bestimmte Form und ein bestimmtes Volumen", "eine bestimmte Form, aber kein bestimmtes Volumen", "keine bestimmte Form, aber ein bestimmtes Volumen", "weder eine bestimmte Form noch ein bestimmtes Volumen"]}'}
['1', '2', '3', '4']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_8_20', 'question': 'The equation below shows the products formed when a solution of silver nitrate (AgNO3) reacts with a solution of sodium chloride (NaCl). AgNO3 \x02+ NaCl (Reactants) -> NaNO3 \x02+ AgCl (Products) In this equation, the total mass of the reactants is', 'choices': {'text': ['greater than the total mass of the products', 'equal to the total mass of the products', 'equal to the mass of AgCl', 'less than the mass of AgCl'], 'label': ['1', '2', '3', '4']}, 'answerKey': '2', 'question_de': 'Die Gleichung unten zeigt die Produkte, die entstehen, wenn eine Lösung von Silbernitrat (AgNO3) mit einer Lösung von Natriumchlorid (NaCl) reagiert. AgNO3 \x02+ NaCl (Edukte) -> NaNO3 \x02+ AgCl (Produkte) In dieser Gleichung ist die Gesamtmasse der Edukte', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['größer als die Gesamtmasse der Produkte', 'gleich der Gesamtmasse der Produkte', 'gleich der Masse von AgCl', 'kleiner als die Masse von AgCl']}, 'translation_de': '{"question": "Die Gleichung unten zeigt die Produkte, die entstehen, wenn eine Lösung von Silbernitrat (AgNO3) mit einer Lösung von Natriumchlorid (NaCl) reagiert. AgNO3 \\u0002+ NaCl (Edukte) -> NaNO3 \\u0002+ AgCl (Produkte) In dieser Gleichung ist die Gesamtmasse der Edukte", "choices": ["größer als die Gesamtmasse der Produkte", "gleich der Gesamtmasse der Produkte", "gleich der Masse von AgCl", "kleiner als die Masse von AgCl"]}'}
['1', '2', '3', '4']
['A', 'B', 'C', 'D']
Label mismatch:
{'id': 'NYSEDREGENTS_2014_8_41', 'question': 'In a car accident, a seat belt helps prevent injuries by applying a force', 'choices': {'text': ['less than the force of the moving passenger', 'greater than the force of the moving car', "in the same direction as the car's motion", "in the opposite direction of the passenger's motion"], 'label': ['1', '2', '3', '4']}, 'answerKey': '4', 'question_de': 'Bei einem Autounfall hilft der Sicherheitsgurt Verletzungen zu vermeiden, indem er eine Kraft anwendet', 'choices_de': {'label': ['A', 'B', 'C', 'D'], 'text': ['die kleiner ist als die Kraft des sich bewegenden Passagiers', 'die größer ist als die Kraft des sich bewegenden Autos', 'in die gleiche Richtung wie die Bewegung des Autos', 'in entgegengesetzter Richtung zur Bewegung des Passagiers']}, 'translation_de': '{"question": "Bei einem Autounfall hilft der Sicherheitsgurt Verletzungen zu vermeiden, indem er eine Kraft anwendet", "choices": ["die kleiner ist als die Kraft des sich bewegenden Passagiers", "die größer ist als die Kraft des sich bewegenden Autos", "in die gleiche Richtung wie die Bewegung des Autos", "in entgegengesetzter Richtung zur Bewegung des Passagiers"]}'}
['1', '2', '3', '4']
['A', 'B', 'C', 'D']
```
|
Aditya000001/llamadataset | ---
license: wtfpl
---
|
jtatman/CoT_reformatted_preprocessed_llama2 | ---
dataset_info:
features:
- name: text
dtype: string
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
splits:
- name: train
num_bytes: 10782796585
num_examples: 1851975
- name: eval
num_bytes: 180549
num_examples: 25
download_size: 1530950918
dataset_size: 10782977134
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: eval
path: data/eval-*
---
|
trajanson/black-long-sleeve-jersey | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 4220210.0
num_examples: 19
download_size: 4215502
dataset_size: 4220210.0
---
# Dataset Card for "black-long-sleeve-jersey"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Princess3/Court_data_1k | ---
license: wtfpl
language:
- en
---
Just some New Zealand court decisions data; it probably needs some more cleaning. The data ranges from 1975 to now, and it's all randomly selected.
1158 cases |
tr416/dataset_20231007_033121 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: input_ids
sequence: int32
- name: attention_mask
sequence: int8
splits:
- name: train
num_bytes: 762696.0
num_examples: 297
- name: test
num_bytes: 7704.0
num_examples: 3
download_size: 74119
dataset_size: 770400.0
---
# Dataset Card for "dataset_20231007_033121"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kunishou/hh-rlhf-49k-ja | ---
license: mit
---
This dataset was created by automatically translating part of "Anthropic/hh-rlhf" into Japanese.
This dataset is also included in "mosaicml/dolly_hhrlhf".
The "ng_translation" flag indicates that the translation was not successful, and "1" means that the translation failed.
Therefore, for data with "1", "instruction" and "instruction_en" contain the same text.
以下の通りに読み込むことで"ng_translation"が"1"(翻訳誤り)のものを除外して使用できます。
```
pip install datasets
```
```
from datasets import Dataset, load_dataset
dataset = load_dataset("kunishou/hh-rlhf-49k-ja")
dataset.set_format(type="pandas")
df = dataset["train"][:]
df = df[df["ng_translation"]!="1"].drop(["ng_translation", "index"], axis=1).reset_index()
dataset = Dataset.from_pandas(df)
dataset
```
hh-rlhf repository
https://github.com/anthropics/hh-rlhf
Anthropic/hh-rlhf
https://huggingface.co/datasets/Anthropic/hh-rlhf
mosaicml/dolly_hhrlhf
https://huggingface.co/datasets/mosaicml/dolly_hhrlhf |
Ranjit/displace_dev_data | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: label
dtype:
class_label:
names:
'0': test
'1': train
- name: language
dtype: string
splits:
- name: train
num_bytes: 1788567880.172
num_examples: 30569
- name: test
num_bytes: 352004692.0
num_examples: 5560
download_size: 2274720816
dataset_size: 2140572572.172
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
Csplk/THE.ASCII.ART.EMPORIUM | ---
license: cc-by-nc-sa-4.0
task_categories:
- text2text-generation
- text-generation
language:
- en
tags:
- art
pretty_name: THE.ASCII.ART.EMPORIUM
source: https://ascii.mozz.us:7070
---
```
https://asciiartist.com/sitefiles/respectartistscampaign.html
(published by Laura Brown aka ldb)
Respect ASCII Artists Campaign
Most ASCII artists will tag their ASCII creation with their initials.
This is not just about signing your art, it shows the original artist.
If someone else colours the art, or modifies it in any other way, the
artist initials need to be kept with it. Anyone modifying art can add
their initials (usually something like ldb/ you) and a note about what
they had done to the original art. The original artist initials are
left on the art, the initials of the person who modified it are added
after the original artist.
If you can not find the original artist, or a link to them on a site
or social media, include a link to the source where you found the art
as something being better than nothing. This helps preserve and
archive the art as well as giving the artist credit.
This is what the Respect ASCII Artists Campaign is about. Give respect
to the original artist and leave their initials on the work.
Not all ASCII art is available freely. Don't assume it is there for
anyone to take (copy and paste or screenshot). Ask to repost, modify,
or otherwise use the ASCII art you find online. Don't sell art you do
not own or did not fully create yourself.
The ribbon above is from the original Respect ASCII Artists Campaign.
Started April 1998, by truffle.
``` |
yuan-sf63/chenyu_mask_64 | ---
dataset_info:
features:
- name: feature
dtype: string
- name: target
dtype: string
splits:
- name: train
num_bytes: 10038428.1
num_examples: 91242
- name: validation
num_bytes: 1115380.9
num_examples: 10138
download_size: 0
dataset_size: 11153809.0
---
# Dataset Card for "chenyu_mask_64"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
jwigginton/news-sp500 | ---
dataset_info:
features:
- name: symbol
dtype: string
- name: body
dtype: string
- name: publisher
dtype: string
- name: publish_time
dtype: timestamp[ns, tz=GMT]
- name: title
dtype: string
- name: url
dtype: string
- name: uuid
dtype: string
splits:
- name: train
num_bytes: 12055461
num_examples: 2010
download_size: 5673552
dataset_size: 12055461
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
metaeval/nli-veridicality-transitivity | ---
license: cc
task_categories:
- text-classification
language:
- en
task_ids:
- natural-language-inference
---
```bib
@inproceedings{yanaka-etal-2021-exploring,
title = "Exploring Transitivity in Neural {NLI} Models through Veridicality",
author = "Yanaka, Hitomi and
Mineshima, Koji and
Inui, Kentaro",
booktitle = "Proceedings of the 16th Conference of the European Chapter of the Association for Computational Linguistics: Main Volume",
year = "2021",
pages = "920--934",
}
``` |
prithviraj-maurya/safesign-combined-general-ift | ---
dataset_info:
features:
- name: question
dtype: string
- name: response
dtype: string
splits:
- name: train
num_bytes: 10716758457
num_examples: 2596500
download_size: 5594595999
dataset_size: 10716758457
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/details_google__recurrentgemma-2b | ---
pretty_name: Evaluation run of google/recurrentgemma-2b
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [google/recurrentgemma-2b](https://huggingface.co/google/recurrentgemma-2b)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_google__recurrentgemma-2b-hf_private\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-04-09T03:06:15.036120](https://huggingface.co/datasets/open-llm-leaderboard/details_google__recurrentgemma-2b-hf_private/blob/main/results_2024-04-09T03-06-15.036120.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3466998813893547,\n\
\ \"acc_stderr\": 0.033548531585255545,\n \"acc_norm\": 0.3467198572346389,\n\
\ \"acc_norm_stderr\": 0.034298588914231626,\n \"mc1\": 0.211750305997552,\n\
\ \"mc1_stderr\": 0.014302068353925609,\n \"mc2\": 0.3501381307826391,\n\
\ \"mc2_stderr\": 0.013501544365145366\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.27047781569965873,\n \"acc_stderr\": 0.012980954547659556,\n\
\ \"acc_norm\": 0.28924914675767915,\n \"acc_norm_stderr\": 0.013250012579393443\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.44064927305317664,\n\
\ \"acc_stderr\": 0.004954503606471611,\n \"acc_norm\": 0.569308902609042,\n\
\ \"acc_norm_stderr\": 0.004941609820763589\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.3925925925925926,\n\
\ \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.3925925925925926,\n\
\ \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.3815789473684211,\n \"acc_stderr\": 0.03953173377749194,\n\
\ \"acc_norm\": 0.3815789473684211,\n \"acc_norm_stderr\": 0.03953173377749194\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.3,\n\
\ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \
\ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.38113207547169814,\n \"acc_stderr\": 0.02989060968628663,\n\
\ \"acc_norm\": 0.38113207547169814,\n \"acc_norm_stderr\": 0.02989060968628663\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3472222222222222,\n\
\ \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.3472222222222222,\n\
\ \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n\
\ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.34104046242774566,\n\
\ \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.34104046242774566,\n\
\ \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364395,\n\
\ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364395\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n\
\ \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.029896145682095462,\n\
\ \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.029896145682095462\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\
\ \"acc_stderr\": 0.04266339443159393,\n \"acc_norm\": 0.2894736842105263,\n\
\ \"acc_norm_stderr\": 0.04266339443159393\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.43448275862068964,\n \"acc_stderr\": 0.041307408795554966,\n\
\ \"acc_norm\": 0.43448275862068964,\n \"acc_norm_stderr\": 0.041307408795554966\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.2698412698412698,\n \"acc_stderr\": 0.022860838309232072,\n \"\
acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.022860838309232072\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30952380952380953,\n\
\ \"acc_stderr\": 0.04134913018303316,\n \"acc_norm\": 0.30952380952380953,\n\
\ \"acc_norm_stderr\": 0.04134913018303316\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.3741935483870968,\n \"acc_stderr\": 0.027528904299845777,\n \"\
acc_norm\": 0.3741935483870968,\n \"acc_norm_stderr\": 0.027528904299845777\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.2857142857142857,\n \"acc_stderr\": 0.03178529710642749,\n \"\
acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.03178529710642749\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\"\
: 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.22424242424242424,\n \"acc_stderr\": 0.03256866661681102,\n\
\ \"acc_norm\": 0.22424242424242424,\n \"acc_norm_stderr\": 0.03256866661681102\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.4797979797979798,\n \"acc_stderr\": 0.03559443565563919,\n \"\
acc_norm\": 0.4797979797979798,\n \"acc_norm_stderr\": 0.03559443565563919\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.41450777202072536,\n \"acc_stderr\": 0.03555300319557673,\n\
\ \"acc_norm\": 0.41450777202072536,\n \"acc_norm_stderr\": 0.03555300319557673\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.38461538461538464,\n \"acc_stderr\": 0.02466674491518722,\n\
\ \"acc_norm\": 0.38461538461538464,\n \"acc_norm_stderr\": 0.02466674491518722\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02534809746809783,\n \
\ \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02534809746809783\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.28991596638655465,\n \"acc_stderr\": 0.029472485833136094,\n\
\ \"acc_norm\": 0.28991596638655465,\n \"acc_norm_stderr\": 0.029472485833136094\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"\
acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.3926605504587156,\n \"acc_stderr\": 0.020937505161201093,\n \"\
acc_norm\": 0.3926605504587156,\n \"acc_norm_stderr\": 0.020937505161201093\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.2962962962962963,\n \"acc_stderr\": 0.03114144782353603,\n \"\
acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.03114144782353603\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.24509803921568626,\n \"acc_stderr\": 0.030190282453501936,\n \"\
acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.030190282453501936\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.25738396624472576,\n \"acc_stderr\": 0.028458820991460302,\n \
\ \"acc_norm\": 0.25738396624472576,\n \"acc_norm_stderr\": 0.028458820991460302\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.32286995515695066,\n\
\ \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.32286995515695066,\n\
\ \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.40458015267175573,\n \"acc_stderr\": 0.043046937953806645,\n\
\ \"acc_norm\": 0.40458015267175573,\n \"acc_norm_stderr\": 0.043046937953806645\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.4214876033057851,\n \"acc_stderr\": 0.04507732278775094,\n \"\
acc_norm\": 0.4214876033057851,\n \"acc_norm_stderr\": 0.04507732278775094\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3333333333333333,\n\
\ \"acc_stderr\": 0.04557239513497751,\n \"acc_norm\": 0.3333333333333333,\n\
\ \"acc_norm_stderr\": 0.04557239513497751\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.25766871165644173,\n \"acc_stderr\": 0.03436150827846917,\n\
\ \"acc_norm\": 0.25766871165644173,\n \"acc_norm_stderr\": 0.03436150827846917\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.32142857142857145,\n\
\ \"acc_stderr\": 0.0443280405529152,\n \"acc_norm\": 0.32142857142857145,\n\
\ \"acc_norm_stderr\": 0.0443280405529152\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.3786407766990291,\n \"acc_stderr\": 0.04802694698258974,\n\
\ \"acc_norm\": 0.3786407766990291,\n \"acc_norm_stderr\": 0.04802694698258974\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.47435897435897434,\n\
\ \"acc_stderr\": 0.03271298896811159,\n \"acc_norm\": 0.47435897435897434,\n\
\ \"acc_norm_stderr\": 0.03271298896811159\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.4329501915708812,\n\
\ \"acc_stderr\": 0.017718469101513982,\n \"acc_norm\": 0.4329501915708812,\n\
\ \"acc_norm_stderr\": 0.017718469101513982\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.3583815028901734,\n \"acc_stderr\": 0.0258167567915842,\n\
\ \"acc_norm\": 0.3583815028901734,\n \"acc_norm_stderr\": 0.0258167567915842\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24022346368715083,\n\
\ \"acc_stderr\": 0.014288343803925302,\n \"acc_norm\": 0.24022346368715083,\n\
\ \"acc_norm_stderr\": 0.014288343803925302\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.027684181883302877,\n\
\ \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.027684181883302877\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.40514469453376206,\n\
\ \"acc_stderr\": 0.027882383791325946,\n \"acc_norm\": 0.40514469453376206,\n\
\ \"acc_norm_stderr\": 0.027882383791325946\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.4104938271604938,\n \"acc_stderr\": 0.027371350925124768,\n\
\ \"acc_norm\": 0.4104938271604938,\n \"acc_norm_stderr\": 0.027371350925124768\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.25886524822695034,\n \"acc_stderr\": 0.026129572527180848,\n \
\ \"acc_norm\": 0.25886524822695034,\n \"acc_norm_stderr\": 0.026129572527180848\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\
\ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\
\ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.1801470588235294,\n \"acc_stderr\": 0.02334516361654485,\n\
\ \"acc_norm\": 0.1801470588235294,\n \"acc_norm_stderr\": 0.02334516361654485\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.31862745098039214,\n \"acc_stderr\": 0.018850084696468705,\n \
\ \"acc_norm\": 0.31862745098039214,\n \"acc_norm_stderr\": 0.018850084696468705\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3,\n\
\ \"acc_stderr\": 0.04389311454644286,\n \"acc_norm\": 0.3,\n \
\ \"acc_norm_stderr\": 0.04389311454644286\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.23673469387755103,\n \"acc_stderr\": 0.027212835884073153,\n\
\ \"acc_norm\": 0.23673469387755103,\n \"acc_norm_stderr\": 0.027212835884073153\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.48258706467661694,\n\
\ \"acc_stderr\": 0.03533389234739244,\n \"acc_norm\": 0.48258706467661694,\n\
\ \"acc_norm_stderr\": 0.03533389234739244\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \
\ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n\
\ \"acc_stderr\": 0.03664314777288085,\n \"acc_norm\": 0.3313253012048193,\n\
\ \"acc_norm_stderr\": 0.03664314777288085\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.4619883040935672,\n \"acc_stderr\": 0.03823727092882307,\n\
\ \"acc_norm\": 0.4619883040935672,\n \"acc_norm_stderr\": 0.03823727092882307\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.211750305997552,\n\
\ \"mc1_stderr\": 0.014302068353925609,\n \"mc2\": 0.3501381307826391,\n\
\ \"mc2_stderr\": 0.013501544365145366\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6827150749802684,\n \"acc_stderr\": 0.01308059841133212\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.15693707354056102,\n \
\ \"acc_stderr\": 0.010019246595616153\n }\n}\n```"
repo_url: https://huggingface.co/google/recurrentgemma-2b
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|arc:challenge|25_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|arc:challenge|25_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|arc:challenge|25_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|gsm8k|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|gsm8k|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|gsm8k|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hellaswag|10_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hellaswag|10_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hellaswag|10_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T00-15-01.985047.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T00-53-48.304390.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T03-06-15.036120.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-04-09T03-06-15.036120.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- '**/details_harness|winogrande|5_2024-04-09T00-15-01.985047.parquet'
- split: 2024_04_09T00_53_48.304390
path:
- '**/details_harness|winogrande|5_2024-04-09T00-53-48.304390.parquet'
- split: 2024_04_09T03_06_15.036120
path:
- '**/details_harness|winogrande|5_2024-04-09T03-06-15.036120.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-04-09T03-06-15.036120.parquet'
- config_name: results
data_files:
- split: 2024_04_09T00_15_01.985047
path:
- results_2024-04-09T00-15-01.985047.parquet
- split: 2024_04_09T00_53_48.304390
path:
- results_2024-04-09T00-53-48.304390.parquet
- split: 2024_04_09T03_06_15.036120
path:
- results_2024-04-09T03-06-15.036120.parquet
- split: latest
path:
- results_2024-04-09T03-06-15.036120.parquet
---
# Dataset Card for Evaluation run of google/recurrentgemma-2b
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [google/recurrentgemma-2b](https://huggingface.co/google/recurrentgemma-2b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_google__recurrentgemma-2b-hf_private",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-04-09T03:06:15.036120](https://huggingface.co/datasets/open-llm-leaderboard/details_google__recurrentgemma-2b-hf_private/blob/main/results_2024-04-09T03-06-15.036120.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.3466998813893547,
"acc_stderr": 0.033548531585255545,
"acc_norm": 0.3467198572346389,
"acc_norm_stderr": 0.034298588914231626,
"mc1": 0.211750305997552,
"mc1_stderr": 0.014302068353925609,
"mc2": 0.3501381307826391,
"mc2_stderr": 0.013501544365145366
},
"harness|arc:challenge|25": {
"acc": 0.27047781569965873,
"acc_stderr": 0.012980954547659556,
"acc_norm": 0.28924914675767915,
"acc_norm_stderr": 0.013250012579393443
},
"harness|hellaswag|10": {
"acc": 0.44064927305317664,
"acc_stderr": 0.004954503606471611,
"acc_norm": 0.569308902609042,
"acc_norm_stderr": 0.004941609820763589
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252605,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252605
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.3925925925925926,
"acc_stderr": 0.04218506215368879,
"acc_norm": 0.3925925925925926,
"acc_norm_stderr": 0.04218506215368879
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.3815789473684211,
"acc_stderr": 0.03953173377749194,
"acc_norm": 0.3815789473684211,
"acc_norm_stderr": 0.03953173377749194
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.38113207547169814,
"acc_stderr": 0.02989060968628663,
"acc_norm": 0.38113207547169814,
"acc_norm_stderr": 0.02989060968628663
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.3472222222222222,
"acc_stderr": 0.039812405437178615,
"acc_norm": 0.3472222222222222,
"acc_norm_stderr": 0.039812405437178615
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.41,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.41,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.34104046242774566,
"acc_stderr": 0.036146654241808254,
"acc_norm": 0.34104046242774566,
"acc_norm_stderr": 0.036146654241808254
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.04280105837364395,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.04280105837364395
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.43,
"acc_stderr": 0.049756985195624284,
"acc_norm": 0.43,
"acc_norm_stderr": 0.049756985195624284
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.2978723404255319,
"acc_stderr": 0.029896145682095462,
"acc_norm": 0.2978723404255319,
"acc_norm_stderr": 0.029896145682095462
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159393,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159393
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.43448275862068964,
"acc_stderr": 0.041307408795554966,
"acc_norm": 0.43448275862068964,
"acc_norm_stderr": 0.041307408795554966
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.022860838309232072,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.022860838309232072
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.30952380952380953,
"acc_stderr": 0.04134913018303316,
"acc_norm": 0.30952380952380953,
"acc_norm_stderr": 0.04134913018303316
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.3741935483870968,
"acc_stderr": 0.027528904299845777,
"acc_norm": 0.3741935483870968,
"acc_norm_stderr": 0.027528904299845777
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.03178529710642749,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.03178529710642749
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.22424242424242424,
"acc_stderr": 0.03256866661681102,
"acc_norm": 0.22424242424242424,
"acc_norm_stderr": 0.03256866661681102
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.4797979797979798,
"acc_stderr": 0.03559443565563919,
"acc_norm": 0.4797979797979798,
"acc_norm_stderr": 0.03559443565563919
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.41450777202072536,
"acc_stderr": 0.03555300319557673,
"acc_norm": 0.41450777202072536,
"acc_norm_stderr": 0.03555300319557673
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.38461538461538464,
"acc_stderr": 0.02466674491518722,
"acc_norm": 0.38461538461538464,
"acc_norm_stderr": 0.02466674491518722
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.02534809746809783,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.02534809746809783
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.28991596638655465,
"acc_stderr": 0.029472485833136094,
"acc_norm": 0.28991596638655465,
"acc_norm_stderr": 0.029472485833136094
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526732,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526732
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.3926605504587156,
"acc_stderr": 0.020937505161201093,
"acc_norm": 0.3926605504587156,
"acc_norm_stderr": 0.020937505161201093
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.2962962962962963,
"acc_stderr": 0.03114144782353603,
"acc_norm": 0.2962962962962963,
"acc_norm_stderr": 0.03114144782353603
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.24509803921568626,
"acc_stderr": 0.030190282453501936,
"acc_norm": 0.24509803921568626,
"acc_norm_stderr": 0.030190282453501936
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.25738396624472576,
"acc_stderr": 0.028458820991460302,
"acc_norm": 0.25738396624472576,
"acc_norm_stderr": 0.028458820991460302
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.32286995515695066,
"acc_stderr": 0.03138147637575499,
"acc_norm": 0.32286995515695066,
"acc_norm_stderr": 0.03138147637575499
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.40458015267175573,
"acc_stderr": 0.043046937953806645,
"acc_norm": 0.40458015267175573,
"acc_norm_stderr": 0.043046937953806645
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.4214876033057851,
"acc_stderr": 0.04507732278775094,
"acc_norm": 0.4214876033057851,
"acc_norm_stderr": 0.04507732278775094
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.3333333333333333,
"acc_stderr": 0.04557239513497751,
"acc_norm": 0.3333333333333333,
"acc_norm_stderr": 0.04557239513497751
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.25766871165644173,
"acc_stderr": 0.03436150827846917,
"acc_norm": 0.25766871165644173,
"acc_norm_stderr": 0.03436150827846917
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.32142857142857145,
"acc_stderr": 0.0443280405529152,
"acc_norm": 0.32142857142857145,
"acc_norm_stderr": 0.0443280405529152
},
"harness|hendrycksTest-management|5": {
"acc": 0.3786407766990291,
"acc_stderr": 0.04802694698258974,
"acc_norm": 0.3786407766990291,
"acc_norm_stderr": 0.04802694698258974
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.47435897435897434,
"acc_stderr": 0.03271298896811159,
"acc_norm": 0.47435897435897434,
"acc_norm_stderr": 0.03271298896811159
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.4329501915708812,
"acc_stderr": 0.017718469101513982,
"acc_norm": 0.4329501915708812,
"acc_norm_stderr": 0.017718469101513982
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.3583815028901734,
"acc_stderr": 0.0258167567915842,
"acc_norm": 0.3583815028901734,
"acc_norm_stderr": 0.0258167567915842
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.24022346368715083,
"acc_stderr": 0.014288343803925302,
"acc_norm": 0.24022346368715083,
"acc_norm_stderr": 0.014288343803925302
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.37254901960784315,
"acc_stderr": 0.027684181883302877,
"acc_norm": 0.37254901960784315,
"acc_norm_stderr": 0.027684181883302877
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.40514469453376206,
"acc_stderr": 0.027882383791325946,
"acc_norm": 0.40514469453376206,
"acc_norm_stderr": 0.027882383791325946
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.4104938271604938,
"acc_stderr": 0.027371350925124768,
"acc_norm": 0.4104938271604938,
"acc_norm_stderr": 0.027371350925124768
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.25886524822695034,
"acc_stderr": 0.026129572527180848,
"acc_norm": 0.25886524822695034,
"acc_norm_stderr": 0.026129572527180848
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.2457627118644068,
"acc_stderr": 0.010996156635142692,
"acc_norm": 0.2457627118644068,
"acc_norm_stderr": 0.010996156635142692
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.1801470588235294,
"acc_stderr": 0.02334516361654485,
"acc_norm": 0.1801470588235294,
"acc_norm_stderr": 0.02334516361654485
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.31862745098039214,
"acc_stderr": 0.018850084696468705,
"acc_norm": 0.31862745098039214,
"acc_norm_stderr": 0.018850084696468705
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.3,
"acc_stderr": 0.04389311454644286,
"acc_norm": 0.3,
"acc_norm_stderr": 0.04389311454644286
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.23673469387755103,
"acc_stderr": 0.027212835884073153,
"acc_norm": 0.23673469387755103,
"acc_norm_stderr": 0.027212835884073153
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.48258706467661694,
"acc_stderr": 0.03533389234739244,
"acc_norm": 0.48258706467661694,
"acc_norm_stderr": 0.03533389234739244
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.6,
"acc_stderr": 0.04923659639173309,
"acc_norm": 0.6,
"acc_norm_stderr": 0.04923659639173309
},
"harness|hendrycksTest-virology|5": {
"acc": 0.3313253012048193,
"acc_stderr": 0.03664314777288085,
"acc_norm": 0.3313253012048193,
"acc_norm_stderr": 0.03664314777288085
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.4619883040935672,
"acc_stderr": 0.03823727092882307,
"acc_norm": 0.4619883040935672,
"acc_norm_stderr": 0.03823727092882307
},
"harness|truthfulqa:mc|0": {
"mc1": 0.211750305997552,
"mc1_stderr": 0.014302068353925609,
"mc2": 0.3501381307826391,
"mc2_stderr": 0.013501544365145366
},
"harness|winogrande|5": {
"acc": 0.6827150749802684,
"acc_stderr": 0.01308059841133212
},
"harness|gsm8k|5": {
"acc": 0.15693707354056102,
"acc_stderr": 0.010019246595616153
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
mask-distilled-onesec-cv12-each-chunk-uniq/chunk_245 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 753290112.0
num_examples: 147936
download_size: 767852826
dataset_size: 753290112.0
---
# Dataset Card for "chunk_245"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
autoevaluate/autoeval-eval-futin__guess-en_3-fcaae9-2012466610 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- futin/guess
eval_info:
task: text_zero_shot_classification
model: facebook/opt-66b
metrics: []
dataset_name: futin/guess
dataset_config: en_3
dataset_split: test
col_mapping:
text: text
classes: classes
target: target
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Zero-Shot Text Classification
* Model: facebook/opt-66b
* Dataset: futin/guess
* Config: en_3
* Split: test
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@futin](https://huggingface.co/futin) for evaluating this model. |
fleurylol/cfacatering | ---
license: apache-2.0
---
|
MITCriticalData/L7_Dataset_2008_2015 | ---
license: mit
---
|
lenmon666/lentest | ---
license: apache-2.0
---
# Dataset Card for Dataset Name
<!-- Provide a quick summary of the dataset. -->
This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1).
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
Markuin/Ia_vozes | ---
license: unknown
---
|
open-llm-leaderboard/details_CausalLM__72B-preview | ---
pretty_name: Evaluation run of CausalLM/72B-preview
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [CausalLM/72B-preview](https://huggingface.co/CausalLM/72B-preview) on the [Open\
\ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
  \nThe dataset is composed of 63 configurations, each one corresponding to one of the\
  \ evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
  \ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CausalLM__72B-preview\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-12-09T21:42:26.382618](https://huggingface.co/datasets/open-llm-leaderboard/details_CausalLM__72B-preview/blob/main/results_2023-12-09T21-42-26.382618.json)(note\
  \ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7667362936260237,\n\
\ \"acc_stderr\": 0.027929321227362417,\n \"acc_norm\": 0.7704368351697709,\n\
\ \"acc_norm_stderr\": 0.028461947646281283,\n \"mc1\": 0.3671970624235006,\n\
\ \"mc1_stderr\": 0.01687480500145318,\n \"mc2\": 0.5257567284522894,\n\
\ \"mc2_stderr\": 0.014743557767765337\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.606655290102389,\n \"acc_stderr\": 0.014275101465693024,\n\
\ \"acc_norm\": 0.6518771331058021,\n \"acc_norm_stderr\": 0.013921008595179347\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6468830910177256,\n\
\ \"acc_stderr\": 0.004769618829196502,\n \"acc_norm\": 0.8323043218482374,\n\
\ \"acc_norm_stderr\": 0.0037283229688748914\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \
\ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7407407407407407,\n\
\ \"acc_stderr\": 0.03785714465066653,\n \"acc_norm\": 0.7407407407407407,\n\
\ \"acc_norm_stderr\": 0.03785714465066653\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.9144736842105263,\n \"acc_stderr\": 0.02275867713088861,\n\
\ \"acc_norm\": 0.9144736842105263,\n \"acc_norm_stderr\": 0.02275867713088861\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.79,\n\
\ \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \
\ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.8301886792452831,\n \"acc_stderr\": 0.023108393799841326,\n\
\ \"acc_norm\": 0.8301886792452831,\n \"acc_norm_stderr\": 0.023108393799841326\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8958333333333334,\n\
\ \"acc_stderr\": 0.025545239210256917,\n \"acc_norm\": 0.8958333333333334,\n\
\ \"acc_norm_stderr\": 0.025545239210256917\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \
\ \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n\
\ \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7803468208092486,\n\
\ \"acc_stderr\": 0.031568093627031744,\n \"acc_norm\": 0.7803468208092486,\n\
\ \"acc_norm_stderr\": 0.031568093627031744\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.5392156862745098,\n \"acc_stderr\": 0.04959859966384181,\n\
\ \"acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.04959859966384181\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n\
\ \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.8,\n \"acc_stderr\": 0.026148818018424502,\n \
\ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.026148818018424502\n \
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5701754385964912,\n\
\ \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.5701754385964912,\n\
\ \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.8,\n \"acc_stderr\": 0.0333333333333333,\n \
\ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.0333333333333333\n },\n\
\ \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6798941798941799,\n\
\ \"acc_stderr\": 0.024026846392873506,\n \"acc_norm\": 0.6798941798941799,\n\
\ \"acc_norm_stderr\": 0.024026846392873506\n },\n \"harness|hendrycksTest-formal_logic|5\"\
: {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04444444444444449,\n\
\ \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04444444444444449\n\
\ },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.54,\n\
\ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-high_school_biology|5\"\
: {\n \"acc\": 0.8903225806451613,\n \"acc_stderr\": 0.017776778700485173,\n\
\ \"acc_norm\": 0.8903225806451613,\n \"acc_norm_stderr\": 0.017776778700485173\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.6600985221674877,\n \"acc_stderr\": 0.033327690684107895,\n \"\
acc_norm\": 0.6600985221674877,\n \"acc_norm_stderr\": 0.033327690684107895\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\"\
: 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.8606060606060606,\n \"acc_stderr\": 0.0270459488258654,\n\
\ \"acc_norm\": 0.8606060606060606,\n \"acc_norm_stderr\": 0.0270459488258654\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.9444444444444444,\n \"acc_stderr\": 0.0163199507007674,\n \"acc_norm\"\
: 0.9444444444444444,\n \"acc_norm_stderr\": 0.0163199507007674\n },\n\
\ \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \
\ \"acc\": 0.9896373056994818,\n \"acc_stderr\": 0.007308424386792194,\n\
\ \"acc_norm\": 0.9896373056994818,\n \"acc_norm_stderr\": 0.007308424386792194\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.8076923076923077,\n \"acc_stderr\": 0.019982347208637282,\n\
\ \"acc_norm\": 0.8076923076923077,\n \"acc_norm_stderr\": 0.019982347208637282\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.5296296296296297,\n \"acc_stderr\": 0.030431963547936584,\n \
\ \"acc_norm\": 0.5296296296296297,\n \"acc_norm_stderr\": 0.030431963547936584\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.8319327731092437,\n \"acc_stderr\": 0.024289102115692275,\n\
\ \"acc_norm\": 0.8319327731092437,\n \"acc_norm_stderr\": 0.024289102115692275\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.543046357615894,\n \"acc_stderr\": 0.040673251742474416,\n \"\
acc_norm\": 0.543046357615894,\n \"acc_norm_stderr\": 0.040673251742474416\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.9284403669724771,\n \"acc_stderr\": 0.011051255247815481,\n \"\
acc_norm\": 0.9284403669724771,\n \"acc_norm_stderr\": 0.011051255247815481\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.6759259259259259,\n \"acc_stderr\": 0.03191923445686186,\n \"\
acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.03191923445686186\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.9215686274509803,\n \"acc_stderr\": 0.01886951464665892,\n \"\
acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.01886951464665892\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8945147679324894,\n \"acc_stderr\": 0.019995560723758535,\n \
\ \"acc_norm\": 0.8945147679324894,\n \"acc_norm_stderr\": 0.019995560723758535\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8116591928251121,\n\
\ \"acc_stderr\": 0.026241132996407252,\n \"acc_norm\": 0.8116591928251121,\n\
\ \"acc_norm_stderr\": 0.026241132996407252\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8778625954198473,\n \"acc_stderr\": 0.02871877688934232,\n\
\ \"acc_norm\": 0.8778625954198473,\n \"acc_norm_stderr\": 0.02871877688934232\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8677685950413223,\n \"acc_stderr\": 0.0309227883204458,\n \"acc_norm\"\
: 0.8677685950413223,\n \"acc_norm_stderr\": 0.0309227883204458\n },\n\
\ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8518518518518519,\n\
\ \"acc_stderr\": 0.03434300243630999,\n \"acc_norm\": 0.8518518518518519,\n\
\ \"acc_norm_stderr\": 0.03434300243630999\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.8588957055214724,\n \"acc_stderr\": 0.027351605518389752,\n\
\ \"acc_norm\": 0.8588957055214724,\n \"acc_norm_stderr\": 0.027351605518389752\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.6785714285714286,\n\
\ \"acc_stderr\": 0.04432804055291518,\n \"acc_norm\": 0.6785714285714286,\n\
\ \"acc_norm_stderr\": 0.04432804055291518\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8640776699029126,\n \"acc_stderr\": 0.03393295729761011,\n\
\ \"acc_norm\": 0.8640776699029126,\n \"acc_norm_stderr\": 0.03393295729761011\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9401709401709402,\n\
\ \"acc_stderr\": 0.015537514263253878,\n \"acc_norm\": 0.9401709401709402,\n\
\ \"acc_norm_stderr\": 0.015537514263253878\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.88,\n \"acc_stderr\": 0.032659863237109066,\n \
\ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.032659863237109066\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9195402298850575,\n\
\ \"acc_stderr\": 0.009726831316141866,\n \"acc_norm\": 0.9195402298850575,\n\
\ \"acc_norm_stderr\": 0.009726831316141866\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.846820809248555,\n \"acc_stderr\": 0.019390370108969934,\n\
\ \"acc_norm\": 0.846820809248555,\n \"acc_norm_stderr\": 0.019390370108969934\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.5642458100558659,\n\
\ \"acc_stderr\": 0.016583881958602397,\n \"acc_norm\": 0.5642458100558659,\n\
\ \"acc_norm_stderr\": 0.016583881958602397\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.8562091503267973,\n \"acc_stderr\": 0.020091188936043714,\n\
\ \"acc_norm\": 0.8562091503267973,\n \"acc_norm_stderr\": 0.020091188936043714\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.8456591639871383,\n\
\ \"acc_stderr\": 0.02051905034208471,\n \"acc_norm\": 0.8456591639871383,\n\
\ \"acc_norm_stderr\": 0.02051905034208471\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.8827160493827161,\n \"acc_stderr\": 0.017903112615281123,\n\
\ \"acc_norm\": 0.8827160493827161,\n \"acc_norm_stderr\": 0.017903112615281123\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.6276595744680851,\n \"acc_stderr\": 0.028838921471251455,\n \
\ \"acc_norm\": 0.6276595744680851,\n \"acc_norm_stderr\": 0.028838921471251455\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.6258148631029987,\n\
\ \"acc_stderr\": 0.012359335618172063,\n \"acc_norm\": 0.6258148631029987,\n\
\ \"acc_norm_stderr\": 0.012359335618172063\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.8272058823529411,\n \"acc_stderr\": 0.02296606758558181,\n\
\ \"acc_norm\": 0.8272058823529411,\n \"acc_norm_stderr\": 0.02296606758558181\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.8202614379084967,\n \"acc_stderr\": 0.01553374508338279,\n \
\ \"acc_norm\": 0.8202614379084967,\n \"acc_norm_stderr\": 0.01553374508338279\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7545454545454545,\n\
\ \"acc_stderr\": 0.04122066502878285,\n \"acc_norm\": 0.7545454545454545,\n\
\ \"acc_norm_stderr\": 0.04122066502878285\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7959183673469388,\n \"acc_stderr\": 0.0258012834750905,\n\
\ \"acc_norm\": 0.7959183673469388,\n \"acc_norm_stderr\": 0.0258012834750905\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8905472636815921,\n\
\ \"acc_stderr\": 0.022076326101824667,\n \"acc_norm\": 0.8905472636815921,\n\
\ \"acc_norm_stderr\": 0.022076326101824667\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.96,\n \"acc_stderr\": 0.01969463855669321,\n \
\ \"acc_norm\": 0.96,\n \"acc_norm_stderr\": 0.01969463855669321\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n\
\ \"acc_stderr\": 0.03858158940685515,\n \"acc_norm\": 0.5662650602409639,\n\
\ \"acc_norm_stderr\": 0.03858158940685515\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8830409356725146,\n \"acc_stderr\": 0.02464806896136616,\n\
\ \"acc_norm\": 0.8830409356725146,\n \"acc_norm_stderr\": 0.02464806896136616\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3671970624235006,\n\
\ \"mc1_stderr\": 0.01687480500145318,\n \"mc2\": 0.5257567284522894,\n\
\ \"mc2_stderr\": 0.014743557767765337\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.824782951854775,\n \"acc_stderr\": 0.010684179227706167\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7210007581501138,\n \
\ \"acc_stderr\": 0.012354115779970311\n }\n}\n```"
repo_url: https://huggingface.co/CausalLM/72B-preview
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|arc:challenge|25_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|arc:challenge|25_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|gsm8k|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|gsm8k|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hellaswag|10_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hellaswag|10_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T20-37-44.242475.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T21-42-26.382618.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-12-09T21-42-26.382618.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- '**/details_harness|winogrande|5_2023-12-09T20-37-44.242475.parquet'
- split: 2023_12_09T21_42_26.382618
path:
- '**/details_harness|winogrande|5_2023-12-09T21-42-26.382618.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-12-09T21-42-26.382618.parquet'
- config_name: results
data_files:
- split: 2023_12_09T20_37_44.242475
path:
- results_2023-12-09T20-37-44.242475.parquet
- split: 2023_12_09T21_42_26.382618
path:
- results_2023-12-09T21-42-26.382618.parquet
- split: latest
path:
- results_2023-12-09T21-42-26.382618.parquet
---
# Dataset Card for Evaluation run of CausalLM/72B-preview
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/CausalLM/72B-preview
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [CausalLM/72B-preview](https://huggingface.co/CausalLM/72B-preview) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_CausalLM__72B-preview",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-12-09T21:42:26.382618](https://huggingface.co/datasets/open-llm-leaderboard/details_CausalLM__72B-preview/blob/main/results_2023-12-09T21-42-26.382618.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```json
{
"all": {
"acc": 0.7667362936260237,
"acc_stderr": 0.027929321227362417,
"acc_norm": 0.7704368351697709,
"acc_norm_stderr": 0.028461947646281283,
"mc1": 0.3671970624235006,
"mc1_stderr": 0.01687480500145318,
"mc2": 0.5257567284522894,
"mc2_stderr": 0.014743557767765337
},
"harness|arc:challenge|25": {
"acc": 0.606655290102389,
"acc_stderr": 0.014275101465693024,
"acc_norm": 0.6518771331058021,
"acc_norm_stderr": 0.013921008595179347
},
"harness|hellaswag|10": {
"acc": 0.6468830910177256,
"acc_stderr": 0.004769618829196502,
"acc_norm": 0.8323043218482374,
"acc_norm_stderr": 0.0037283229688748914
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.42,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.42,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.03785714465066653,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.03785714465066653
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.9144736842105263,
"acc_stderr": 0.02275867713088861,
"acc_norm": 0.9144736842105263,
"acc_norm_stderr": 0.02275867713088861
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8301886792452831,
"acc_stderr": 0.023108393799841326,
"acc_norm": 0.8301886792452831,
"acc_norm_stderr": 0.023108393799841326
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.8958333333333334,
"acc_stderr": 0.025545239210256917,
"acc_norm": 0.8958333333333334,
"acc_norm_stderr": 0.025545239210256917
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.53,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.53,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.63,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.63,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7803468208092486,
"acc_stderr": 0.031568093627031744,
"acc_norm": 0.7803468208092486,
"acc_norm_stderr": 0.031568093627031744
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5392156862745098,
"acc_stderr": 0.04959859966384181,
"acc_norm": 0.5392156862745098,
"acc_norm_stderr": 0.04959859966384181
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.8,
"acc_stderr": 0.026148818018424502,
"acc_norm": 0.8,
"acc_norm_stderr": 0.026148818018424502
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5701754385964912,
"acc_stderr": 0.04657047260594963,
"acc_norm": 0.5701754385964912,
"acc_norm_stderr": 0.04657047260594963
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.8,
"acc_stderr": 0.0333333333333333,
"acc_norm": 0.8,
"acc_norm_stderr": 0.0333333333333333
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.6798941798941799,
"acc_stderr": 0.024026846392873506,
"acc_norm": 0.6798941798941799,
"acc_norm_stderr": 0.024026846392873506
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04444444444444449,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04444444444444449
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8903225806451613,
"acc_stderr": 0.017776778700485173,
"acc_norm": 0.8903225806451613,
"acc_norm_stderr": 0.017776778700485173
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.6600985221674877,
"acc_stderr": 0.033327690684107895,
"acc_norm": 0.6600985221674877,
"acc_norm_stderr": 0.033327690684107895
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.78,
"acc_stderr": 0.04163331998932263,
"acc_norm": 0.78,
"acc_norm_stderr": 0.04163331998932263
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8606060606060606,
"acc_stderr": 0.0270459488258654,
"acc_norm": 0.8606060606060606,
"acc_norm_stderr": 0.0270459488258654
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9444444444444444,
"acc_stderr": 0.0163199507007674,
"acc_norm": 0.9444444444444444,
"acc_norm_stderr": 0.0163199507007674
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9896373056994818,
"acc_stderr": 0.007308424386792194,
"acc_norm": 0.9896373056994818,
"acc_norm_stderr": 0.007308424386792194
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8076923076923077,
"acc_stderr": 0.019982347208637282,
"acc_norm": 0.8076923076923077,
"acc_norm_stderr": 0.019982347208637282
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.5296296296296297,
"acc_stderr": 0.030431963547936584,
"acc_norm": 0.5296296296296297,
"acc_norm_stderr": 0.030431963547936584
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8319327731092437,
"acc_stderr": 0.024289102115692275,
"acc_norm": 0.8319327731092437,
"acc_norm_stderr": 0.024289102115692275
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.543046357615894,
"acc_stderr": 0.040673251742474416,
"acc_norm": 0.543046357615894,
"acc_norm_stderr": 0.040673251742474416
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9284403669724771,
"acc_stderr": 0.011051255247815481,
"acc_norm": 0.9284403669724771,
"acc_norm_stderr": 0.011051255247815481
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6759259259259259,
"acc_stderr": 0.03191923445686186,
"acc_norm": 0.6759259259259259,
"acc_norm_stderr": 0.03191923445686186
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9215686274509803,
"acc_stderr": 0.01886951464665892,
"acc_norm": 0.9215686274509803,
"acc_norm_stderr": 0.01886951464665892
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8945147679324894,
"acc_stderr": 0.019995560723758535,
"acc_norm": 0.8945147679324894,
"acc_norm_stderr": 0.019995560723758535
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8116591928251121,
"acc_stderr": 0.026241132996407252,
"acc_norm": 0.8116591928251121,
"acc_norm_stderr": 0.026241132996407252
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8778625954198473,
"acc_stderr": 0.02871877688934232,
"acc_norm": 0.8778625954198473,
"acc_norm_stderr": 0.02871877688934232
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8677685950413223,
"acc_stderr": 0.0309227883204458,
"acc_norm": 0.8677685950413223,
"acc_norm_stderr": 0.0309227883204458
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8518518518518519,
"acc_stderr": 0.03434300243630999,
"acc_norm": 0.8518518518518519,
"acc_norm_stderr": 0.03434300243630999
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8588957055214724,
"acc_stderr": 0.027351605518389752,
"acc_norm": 0.8588957055214724,
"acc_norm_stderr": 0.027351605518389752
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.6785714285714286,
"acc_stderr": 0.04432804055291518,
"acc_norm": 0.6785714285714286,
"acc_norm_stderr": 0.04432804055291518
},
"harness|hendrycksTest-management|5": {
"acc": 0.8640776699029126,
"acc_stderr": 0.03393295729761011,
"acc_norm": 0.8640776699029126,
"acc_norm_stderr": 0.03393295729761011
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9401709401709402,
"acc_stderr": 0.015537514263253878,
"acc_norm": 0.9401709401709402,
"acc_norm_stderr": 0.015537514263253878
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.88,
"acc_stderr": 0.032659863237109066,
"acc_norm": 0.88,
"acc_norm_stderr": 0.032659863237109066
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9195402298850575,
"acc_stderr": 0.009726831316141866,
"acc_norm": 0.9195402298850575,
"acc_norm_stderr": 0.009726831316141866
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.846820809248555,
"acc_stderr": 0.019390370108969934,
"acc_norm": 0.846820809248555,
"acc_norm_stderr": 0.019390370108969934
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.5642458100558659,
"acc_stderr": 0.016583881958602397,
"acc_norm": 0.5642458100558659,
"acc_norm_stderr": 0.016583881958602397
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8562091503267973,
"acc_stderr": 0.020091188936043714,
"acc_norm": 0.8562091503267973,
"acc_norm_stderr": 0.020091188936043714
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.8456591639871383,
"acc_stderr": 0.02051905034208471,
"acc_norm": 0.8456591639871383,
"acc_norm_stderr": 0.02051905034208471
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8827160493827161,
"acc_stderr": 0.017903112615281123,
"acc_norm": 0.8827160493827161,
"acc_norm_stderr": 0.017903112615281123
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6276595744680851,
"acc_stderr": 0.028838921471251455,
"acc_norm": 0.6276595744680851,
"acc_norm_stderr": 0.028838921471251455
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.6258148631029987,
"acc_stderr": 0.012359335618172063,
"acc_norm": 0.6258148631029987,
"acc_norm_stderr": 0.012359335618172063
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8272058823529411,
"acc_stderr": 0.02296606758558181,
"acc_norm": 0.8272058823529411,
"acc_norm_stderr": 0.02296606758558181
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8202614379084967,
"acc_stderr": 0.01553374508338279,
"acc_norm": 0.8202614379084967,
"acc_norm_stderr": 0.01553374508338279
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7545454545454545,
"acc_stderr": 0.04122066502878285,
"acc_norm": 0.7545454545454545,
"acc_norm_stderr": 0.04122066502878285
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7959183673469388,
"acc_stderr": 0.0258012834750905,
"acc_norm": 0.7959183673469388,
"acc_norm_stderr": 0.0258012834750905
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8905472636815921,
"acc_stderr": 0.022076326101824667,
"acc_norm": 0.8905472636815921,
"acc_norm_stderr": 0.022076326101824667
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.96,
"acc_stderr": 0.01969463855669321,
"acc_norm": 0.96,
"acc_norm_stderr": 0.01969463855669321
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5662650602409639,
"acc_stderr": 0.03858158940685515,
"acc_norm": 0.5662650602409639,
"acc_norm_stderr": 0.03858158940685515
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8830409356725146,
"acc_stderr": 0.02464806896136616,
"acc_norm": 0.8830409356725146,
"acc_norm_stderr": 0.02464806896136616
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3671970624235006,
"mc1_stderr": 0.01687480500145318,
"mc2": 0.5257567284522894,
"mc2_stderr": 0.014743557767765337
},
"harness|winogrande|5": {
"acc": 0.824782951854775,
"acc_stderr": 0.010684179227706167
},
"harness|gsm8k|5": {
"acc": 0.7210007581501138,
"acc_stderr": 0.012354115779970311
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
RealTimeData/arxiv_july_week2_2023 | ---
dataset_info:
features:
- name: entry_id
dtype: string
- name: published
dtype: string
- name: title
dtype: string
- name: authors
sequence: string
- name: primary_category
dtype: string
- name: categories
sequence: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 131449614
num_examples: 3069
download_size: 64855895
dataset_size: 131449614
---
# Dataset Card for "arxiv_july_week2_2023"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ad019el/ar_data | ---
dataset_info:
features:
- name: path
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 40579164.0
num_examples: 1500
- name: test
num_bytes: 15846990.0
num_examples: 500
download_size: 55259208
dataset_size: 56426154.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
# Dataset Card for "ar_data"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ShenaoZhang/0.001_idpo_same_nodpo_dataset | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: prompt_id
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
- name: score_chosen
dtype: float64
- name: score_rejected
dtype: float64
- name: reference_response
dtype: string
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
- name: is_better
dtype: bool
splits:
- name: train_prefs_1
num_bytes: 154691950
num_examples: 20378
- name: test_prefs_1
num_bytes: 15221730
num_examples: 2000
- name: train_prefs_2
num_bytes: 188487562
num_examples: 20378
- name: test_prefs_2
num_bytes: 18441786
num_examples: 2000
- name: train_prefs_3
num_bytes: 174781446
num_examples: 20378
- name: test_prefs_3
num_bytes: 16959398
num_examples: 2000
download_size: 309771174
dataset_size: 568583872
configs:
- config_name: default
data_files:
- split: train_prefs_1
path: data/train_prefs_1-*
- split: test_prefs_1
path: data/test_prefs_1-*
- split: train_prefs_2
path: data/train_prefs_2-*
- split: test_prefs_2
path: data/test_prefs_2-*
- split: train_prefs_3
path: data/train_prefs_3-*
- split: test_prefs_3
path: data/test_prefs_3-*
---
# Dataset Card for "0.001_idpo_same_nodpo_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
maghwa/OpenHermes-2-AR-10K-29-710k-720k | ---
dataset_info:
features:
- name: source
dtype: string
- name: hash
dtype: 'null'
- name: category
dtype: 'null'
- name: system_prompt
dtype: 'null'
- name: model_name
dtype: 'null'
- name: language
dtype: 'null'
- name: views
dtype: float64
- name: conversations
dtype: string
- name: topic
dtype: 'null'
- name: id
dtype: 'null'
- name: avatarUrl
dtype: 'null'
- name: custom_instruction
dtype: 'null'
- name: skip_prompt_formatting
dtype: 'null'
- name: idx
dtype: 'null'
- name: title
dtype: 'null'
- name: model
dtype: 'null'
splits:
- name: train
num_bytes: 25440345
num_examples: 10001
download_size: 11528460
dataset_size: 25440345
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
AdapterOcean/med_alpaca_standardized_cluster_34_std | ---
dataset_info:
features:
- name: message
dtype: string
- name: message_type
dtype: string
- name: message_id
dtype: int64
- name: conversation_id
dtype: int64
- name: cluster
dtype: float64
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 14112922
num_examples: 22085
download_size: 6935670
dataset_size: 14112922
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "med_alpaca_standardized_cluster_34_std"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Vitrola40/RobertoCarlos9091 | ---
license: openrail
---
|
poopat/gad | ---
license: unknown
---
|
autoevaluate/autoeval-staging-eval-project-cnn_dailymail-ca1f103f-12035606 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- cnn_dailymail
eval_info:
task: summarization
model: nbroad/longt5-base-global-mediasum
metrics: []
dataset_name: cnn_dailymail
dataset_config: 3.0.0
dataset_split: test
col_mapping:
text: article
target: highlights
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Summarization
* Model: nbroad/longt5-base-global-mediasum
* Dataset: cnn_dailymail
* Config: 3.0.0
* Split: test
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@nbroad](https://huggingface.co/nbroad) for evaluating this model. |
sidhellman/test | ---
license: mit
---
|
Chidvi201/Twitter_Data.csv | ---
license: unknown
---
|
result-muse256-muse512-wuerst-sdv15/c3f0d903 | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 180
num_examples: 10
download_size: 1372
dataset_size: 180
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "c3f0d903"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Multimodal-Fatima/Caltech101_not_background_test_facebook_opt_125m_Attributes_Caption_ns_5647 | ---
dataset_info:
features:
- name: id
dtype: int64
- name: image
dtype: image
- name: prompt
dtype: string
- name: true_label
dtype: string
- name: prediction
dtype: string
- name: scores
sequence: float64
splits:
- name: fewshot_0_bs_16
num_bytes: 84344156.125
num_examples: 5647
- name: fewshot_1_bs_16
num_bytes: 85792185.125
num_examples: 5647
- name: fewshot_3_bs_16
num_bytes: 88692887.125
num_examples: 5647
- name: fewshot_5_bs_16
num_bytes: 91584891.125
num_examples: 5647
- name: fewshot_8_bs_16
num_bytes: 95914176.125
num_examples: 5647
download_size: 416469739
dataset_size: 446328295.625
---
# Dataset Card for "Caltech101_not_background_test_facebook_opt_125m_Attributes_Caption_ns_5647"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
chirunder/text_messages | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 786735647
num_examples: 11615290
download_size: 563363348
dataset_size: 786735647
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "text_messages"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Dewa/Dog_Emotion_Dataset_v2 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: label
dtype: int64
- name: emotion
dtype: string
- name: image
dtype: image
splits:
- name: train
num_bytes: 128018890.4
num_examples: 3200
- name: test
num_bytes: 31722930.4
num_examples: 800
download_size: 162369679
dataset_size: 159741820.8
license: creativeml-openrail-m
task_categories:
- image-classification
size_categories:
- 1K<n<10K
---
# Dataset Card for "Dog_Emotion_Dataset_v2"
- This dataset is based on a [Kaggle](https://www.kaggle.com/) dataset
# Label and its Meaning
- `0 : sad`
- `1 : angry`
- `2 : relaxed`
- `3 : happy`
intfloat/wikipedia | ---
size_categories:
- 100M<n<1B
---
### Dataset Summary
This dataset is based on [olm/wikipedia](https://huggingface.co/datasets/olm/wikipedia).
The main difference is that we add `Section::::` prefix to each section title to keep the section structure information.
We also use `:` to join the hierarchical section titles.
Following is an example.
```text
Alison Jane Horner (born June 1966) is a British businesswoman, and, until it was sold in 2020, was the CEO of the Asian arm of the Tesco supermarket chain.
Section::::Early life
Alison Jane Horner was born in June 1966. She earned a bachelor's degree in chemistry from the University of Manchester, and an MBA from Manchester Business School.
Section::::Career
Section::::Career:Tesco
Horner joined Tesco as a personnel manager in 1999 and was on Tesco's executive committee from 2011.
In October 2013, Horner became a founding member of The Guardian's Women in Leadership network. in 2015, she became a member of Alliance Manchester Business School's advisory board.
Horner was Tesco' chief people officer (chief human resources officer) of Tesco until May 2018, when she was promoted to be chief executive of Tesco's Asia business in Malaysia and Thailand, until it was sold in late 2020. She was set to step down in February 2021 after 22 years with Tesco.
Section::::Career:Carillion non-executive role
Horner was a non-executive director of Carillion from December 2013, chairing the remuneration committee from June 2014. As of 30 December 2016 her basic compensation was £61,000. After the company went into liquidation in January 2018, Horner was one of the non-executive directors who gave evidence to the House of Commons Business and Work and Pensions select committees on 6 February 2018. In the final report of the Parliamentary Inquiry, published on 16 May 2018, Horner was criticised by MPs; the report concluded:
"... Alison Horner presided over growing salaries and bonuses at the top of the company as its performance faltered. In her evidence to us, she sought to justify her approach by pointing to industry standards, the guidance of advisors, and conversations with shareholders. She failed to demonstrate to us any sense of challenge to the advice she was given, any concern about the views of stakeholders, or any regret at the largesse at the top of Carillion. Ms Horner continues to hold the role of Chief People Officer of Tesco, where she has responsibilities to more than half a million employees. We hope that, in that post, she will reflect on the lessons learned from Carillion and her role in its collapse."
In January 2021, the Insolvency Service said it would seek to ban eight former Carillion directors, including Horner, from holding senior boardroom positions.
Section::::References
Living people
1966 births
British businesspeople in retailing
Tesco people
Alumni of the University of Manchester
Alumni of the Manchester Business School
Carillion people
```
### Data Fields
- `title`: a `string` feature.
- `text`: a `string` feature.
### How to use this dataset
To load this dataset you need to install these first:
```shell
pip install mwparserfromhell==0.6.4 multiprocess==0.70.13
```
Then, you can load any subset of Wikipedia per language and per date this way:
```python
from datasets import load_dataset
dataset = load_dataset("intfloat/wikipedia", language="en", date="20230401")
```
For more information,
please check out [olm/wikipedia](https://huggingface.co/datasets/olm/wikipedia).
## Supported Languages
```
aa
ab
ace
ady
af
ak
als
am
an
ang
ar
arc
arz
as
ast
atj
av
ay
az
azb
ba
bar
bat-smg
bcl
be
be-x-old
bg
bh
bi
bjn
bm
bn
bo
bpy
br
bs
bug
bxr
ca
cbk-zam
cdo
ce
ceb
ch
cho
chr
chy
ckb
co
cr
crh
cs
csb
cu
cv
cy
da
de
din
diq
dsb
dty
dv
dz
ee
el
eml
en
eo
es
et
eu
ext
fa
ff
fi
fiu-vro
fj
fo
fr
frp
frr
fur
fy
ga
gag
gan
gd
gl
glk
gn
gom
gor
got
gu
gv
ha
hak
haw
he
hi
hif
ho
hr
hsb
ht
hu
hy
ia
id
ie
ig
ii
ik
ilo
inh
io
is
it
iu
ja
jam
jbo
jv
ka
kaa
kab
kbd
kbp
kg
ki
kj
kk
kl
km
kn
ko
koi
krc
ks
ksh
ku
kv
kw
ky
la
lad
lb
lbe
lez
lfn
lg
li
lij
lmo
ln
lo
lrc
lt
ltg
lv
mai
map-bms
mdf
mg
mh
mhr
mi
min
mk
ml
mn
mr
mrj
ms
mt
mus
mwl
my
myv
mzn
na
nah
nap
nds
nds-nl
ne
new
ng
nl
nn
no
nov
nrm
nso
nv
ny
oc
olo
om
or
os
pa
pag
pam
pap
pcd
pdc
pfl
pi
pih
pl
pms
pnb
pnt
ps
pt
qu
rm
rmy
rn
ro
roa-rup
roa-tara
ru
rue
rw
sa
sah
sat
sc
scn
sco
sd
se
sg
sh
si
simple
sk
sl
sm
sn
so
sq
sr
srn
ss
st
stq
su
sv
sw
szl
ta
tcy
te
tet
tg
th
ti
tk
tl
tn
to
tpi
tr
ts
tt
tum
tw
ty
tyv
udm
ug
uk
ur
uz
ve
vec
vep
vi
vls
vo
wa
war
wo
wuu
xal
xh
xmf
yi
yo
za
zea
zh
zh-classical
zh-min-nan
zh-yue
zu
``` |
gymprathap/Handwriting-Recognition-Dataset | ---
license: apache-2.0
task_categories:
- token-classification
language:
- en
tags:
- code
pretty_name: Handwriting Recognition Dataset
size_categories:
- 1K<n<10K
---
The dataset comprises over four hundred thousand handwritten names obtained from charitable initiatives.
Character Recognition employs image processing techniques to transform characters present on scanned documents into digital formats. It generally exhibits good performance with machine-printed fonts. Nonetheless, machines still encounter formidable obstacles in accurately identifying handwritten characters due to the vast diversity in individual writing styles.
The total number of first names is 206,799, while the total number of surnames is 207,024. The data was partitioned into a training set (331,059 samples), testing set (41,382 samples), and validation set (41,382 samples) respectively.
FYI: I am not the owner of this dataset. I took this dataset from Kaggle. It is a very interesting and useful dataset for many computer vision applications.
<a href="http://projectcentersinchennai.co.in" title="Project Centers in Chennai">Project Centers in Chennai</a> |
ilsp/truthful_qa_greek | ---
language: el
license: cc-by-nc-sa-4.0
multilinguality: monolingual
size_categories: 1K<n<10K
task_categories:
- multiple-choice
- text-generation
pretty_name: Truthful QA Greek
dataset_info:
- config_name: default
splits:
- name: generation
num_examples: 817
- name: multiple_choice
num_examples: 817
- config_name: generation
features:
- name: type
dtype: string
- name: category
dtype: string
- name: question
dtype: string
- name: best_answer
dtype: string
- name: correct_answers
sequence: string
- name: incorrect_answers
sequence: string
- name: source
dtype: string
- name: question_en
dtype: string
- name: best_answer_en
dtype: string
- name: correct_answers_en
sequence: string
- name: incorrect_answers_en
sequence: string
- name: question_mt
dtype: string
- name: best_answer_mt
dtype: string
- name: correct_answers_mt
sequence: string
- name: incorrect_answers_mt
sequence: string
splits:
- name: train
num_bytes: 2102161
num_examples: 817
download_size: 0
dataset_size: 2102161
- config_name: multiple_choice
features:
- name: question
dtype: string
- name: mc1_targets
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
- name: mc2_targets
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
- name: question_en
dtype: string
- name: mc1_targets_en
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
- name: mc2_targets_en
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
- name: question_mt
dtype: string
- name: mc1_targets_mt
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
- name: mc2_targets_mt
struct:
- name: choices
sequence: string
- name: labels
sequence: int32
splits:
- name: train
num_bytes: 3034225
num_examples: 817
download_size: 0
dataset_size: 3034225
configs:
- config_name: generation
data_files:
- split: train
path: generation/train-*
- config_name: multiple_choice
data_files:
- split: train
path: multiple_choice/train-*
---
# Dataset Card for Truthful QA Greek
The Truthful QA Greek dataset is a set of 817 questions from the [Truthful QA](https://huggingface.co/datasets/truthful_qa) dataset, translated into Greek. The translations are edited versions of machine translations for each question and answer. The machine translations are also provided. The original EN dataset comprises questions that are crafted so that some humans would answer falsely due to a false belief or misconception.
## Dataset Details
### Dataset Description
<!-- -->
- **Curated by:** ILSP/Athena RC
<!--- **Funded by [optional]:** [More Information Needed]-->
<!--- **Shared by [optional]:** [More Information Needed]-->
- **Language(s) (NLP):** el
- **License:** cc-by-nc-sa-4.0
<!--### Dataset Sources [optional]-->
<!-- Provide the basic links for the dataset. -->
<!--- **Repository:** [More Information Needed]-->
<!--- **Paper [optional]:** [More Information Needed]-->
<!--- **Demo [optional]:** [More Information Needed]-->
<!--## Uses-->
<!-- Address questions around how the dataset is intended to be used. -->
<!--### Direct Use-->
<!-- This section describes suitable use cases for the dataset. -->
<!--[More Information Needed]-->
<!--### Out-of-Scope Use-->
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
<!--[More Information Needed]-->
<!--## Dataset Structure-->
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
<!--[More Information Needed]-->
<!--## Dataset Creation-->
<!--### Curation Rationale-->
<!-- Motivation for the creation of this dataset. -->
<!--[More Information Needed]-->
<!--### Source Data-->
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
<!--#### Data Collection and Processing-->
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
<!--[More Information Needed]-->
<!--#### Who are the source data producers?-->
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
<!--[More Information Needed]-->
<!--### Annotations [optional]-->
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
<!--#### Annotation process-->
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
<!--[More Information Needed]-->
<!--#### Who are the annotators?-->
<!-- This section describes the people or systems who created the annotations. -->
<!--[More Information Needed]-->
<!--#### Personal and Sensitive Information-->
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
<!--[More Information Needed]-->
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
This dataset was produced by post-editing machine translation output.
<!--### Recommendations-->
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
<!--Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.-->
<!--## Citation-->
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
<!--**BibTeX:**-->
<!--[More Information Needed]-->
<!--**APA:**-->
<!--[More Information Needed]-->
<!--## Glossary [optional]-->
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
<!--[More Information Needed]-->
<!--## More Information [optional]-->
<!--[More Information Needed]-->
<!--## Dataset Card Authors [optional]-->
<!--[More Information Needed]-->
## Dataset Card Contact
https://www.athenarc.gr/en/ilsp |
niicovila/law_small | ---
license: artistic-2.0
---
|
centroIA/ZephyrFormat | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 2664771
num_examples: 967
download_size: 706398
dataset_size: 2664771
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
hugginglearners/reddit-depression-cleaned | ---
license:
- cc0-1.0
kaggle_id: infamouscoder/depression-reddit-cleaned
---
# Dataset Card for Depression: Reddit Dataset (Cleaned)
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://kaggle.com/datasets/infamouscoder/depression-reddit-cleaned
- **Repository:**
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
The raw data is collected through web scraping of subreddits and is cleaned using multiple NLP techniques. The data is only in the English language. It mainly targets mental health classification.
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
This dataset was shared by [@infamouscoder](https://kaggle.com/infamouscoder)
### Licensing Information
The license for this dataset is cc0-1.0
### Citation Information
```bibtex
[More Information Needed]
```
### Contributions
[More Information Needed] |
ares1123/celebrity_dataset | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': Aaron Eckhart
'1': Aaron Paul
'2': Aaron Rodgers
'3': Aaron Taylor-Johnson
'4': Abbi Jacobson
'5': Abhishek Bachchan
'6': Abigail Breslin
'7': Abigail Spencer
'8': Adam Brody
'9': Adam Devine
'10': Adam Driver
'11': Adam Lambert
'12': Adam Levine
'13': Adam Sandler
'14': Adam Scott
'15': Adele
'16': Adrian Grenier
'17': Adèle Exarchopoulos
'18': Aidan Gillen
'19': Aidan Turner
'20': Aishwarya Rai
'21': Aja Naomi King
'22': Alden Ehrenreich
'23': Aldis Hodge
'24': Alec Baldwin
'25': Alex Morgan
'26': Alex Pettyfer
'27': Alex Rodriguez
'28': Alexander Skarsgård
'29': Alexandra Daddario
'30': Alfre Woodard
'31': Alia Shawkat
'32': Alice Braga
'33': Alice Eve
'34': Alicia Keys
'35': Alicia Vikander
'36': Alison Brie
'37': Allison Janney
'38': Allison Williams
'39': Alyson Hannigan
'40': Amanda Peet
'41': Amanda Seyfried
'42': Amandla Stenberg
'43': Amber Heard
'44': America Ferrera
'45': Amy Adams
'46': Amy Poehler
'47': Amy Schumer
'48': Ana de Armas
'49': Andie MacDowell
'50': Andrew Garfield
'51': Andrew Lincoln
'52': Andrew Scott
'53': Andy Garcia
'54': Andy Samberg
'55': Andy Serkis
'56': Angela Bassett
'57': Angelina Jolie
'58': Anna Camp
'59': Anna Faris
'60': Anna Kendrick
'61': Anna Paquin
'62': AnnaSophia Robb
'63': Annabelle Wallis
'64': Anne Hathaway
'65': Anne Marie
'66': Anne-Marie
'67': Ansel Elgort
'68': Anson Mount
'69': Anthony Hopkins
'70': Anthony Joshua
'71': Anthony Mackie
'72': Antonio Banderas
'73': Anya Taylor-Joy
'74': Ariana Grande
'75': Armie Hammer
'76': Ashley Judd
'77': Ashton Kutcher
'78': Aubrey Plaza
'79': Auli'i Cravalho
'80': Awkwafina
'81': Barack Obama
'82': Bella Hadid
'83': Bella Thorne
'84': Ben Barnes
'85': Ben Mendelsohn
'86': Ben Stiller
'87': Ben Whishaw
'88': Benedict Cumberbatch
'89': Benedict Wong
'90': Benicio del Toro
'91': Bill Gates
'92': Bill Hader
'93': Bill Murray
'94': Bill Pullman
'95': Bill Skarsgård
'96': Billie Eilish
'97': Billie Lourd
'98': Billy Crudup
'99': Billy Porter
'100': Blake Lively
'101': Bob Odenkirk
'102': Bonnie Wright
'103': Boyd Holbrook
'104': Brad Pitt
'105': Bradley Cooper
'106': Brendan Fraser
'107': Brian Cox
'108': Brie Larson
'109': Brittany Snow
'110': Bryan Cranston
'111': Bryce Dallas Howard
'112': Busy Philipps
'113': Caitriona Balfe
'114': Cameron Diaz
'115': Camila Cabello
'116': Camila Mendes
'117': Cardi B
'118': Carey Mulligan
'119': Carla Gugino
'120': Carrie Underwood
'121': Casey Affleck
'122': Cate Blanchett
'123': Catherine Keener
'124': Catherine Zeta-Jones
'125': Celine Dion
'126': Chace Crawford
'127': Chadwick Boseman
'128': Channing Tatum
'129': Charlie Cox
'130': Charlie Day
'131': Charlie Hunnam
'132': Charlie Plummer
'133': Charlize Theron
'134': Chiara Ferragni
'135': Chiwetel Ejiofor
'136': Chloe Bennet
'137': Chloe Grace Moretz
'138': Chloe Sevigny
'139': Chloë Grace Moretz
'140': Chloë Sevigny
'141': Chris Cooper
'142': Chris Evans
'143': Chris Hemsworth
'144': Chris Martin
'145': Chris Messina
'146': Chris Noth
'147': Chris O'Dowd
'148': Chris Pine
'149': Chris Pratt
'150': Chris Tucker
'151': Chrissy Teigen
'152': Christian Bale
'153': Christian Slater
'154': Christina Aguilera
'155': Christina Applegate
'156': Christina Hendricks
'157': Christina Milian
'158': Christina Ricci
'159': Christine Baranski
'160': Christoph Waltz
'161': Christopher Plummer
'162': Christopher Walken
'163': Cillian Murphy
'164': Claire Foy
'165': Clive Owen
'166': Clive Standen
'167': Cobie Smulders
'168': Colin Farrell
'169': Colin Firth
'170': Colin Hanks
'171': Connie Britton
'172': Conor McGregor
'173': Constance Wu
'174': Constance Zimmer
'175': Courteney Cox
'176': Cristiano Ronaldo
'177': Daisy Ridley
'178': Dak Prescott
'179': Dakota Fanning
'180': Dakota Johnson
'181': Damian Lewis
'182': Dan Stevens
'183': Danai Gurira
'184': Dane DeHaan
'185': Daniel Craig
'186': Daniel Dae Kim
'187': Daniel Day-Lewis
'188': Daniel Gillies
'189': Daniel Kaluuya
'190': Daniel Mays
'191': Daniel Radcliffe
'192': Danny DeVito
'193': Darren Criss
'194': Dave Bautista
'195': Dave Franco
'196': Dave Grohl
'197': Daveed Diggs
'198': David Attenborough
'199': David Beckham
'200': David Duchovny
'201': David Harbour
'202': David Oyelowo
'203': David Schwimmer
'204': David Tennant
'205': David Thewlis
'206': Dax Shepard
'207': Debra Messing
'208': Demi Lovato
'209': Dennis Quaid
'210': Denzel Washington
'211': Dermot Mulroney
'212': Dev Patel
'213': Diane Keaton
'214': Diane Kruger
'215': Diane Lane
'216': Diego Boneta
'217': Diego Luna
'218': Djimon Hounsou
'219': Dolly Parton
'220': Domhnall Gleeson
'221': Dominic Cooper
'222': Dominic Monaghan
'223': Dominic West
'224': Don Cheadle
'225': Donald Glover
'226': Donald Sutherland
'227': Donald Trump
'228': Dua Lipa
'229': Dwayne "The Rock" Johnson
'230': Dwayne Johnson
'231': Dylan O'Brien
'232': Ed Harris
'233': Ed Helms
'234': Ed Sheeran
'235': Eddie Murphy
'236': Eddie Redmayne
'237': Edgar Ramirez
'238': Edward Norton
'239': Eiza Gonzalez
'240': Eiza González
'241': Elijah Wood
'242': Elisabeth Moss
'243': Elisha Cuthbert
'244': Eliza Coupe
'245': Elizabeth Banks
'246': Elizabeth Debicki
'247': Elizabeth Lail
'248': Elizabeth McGovern
'249': Elizabeth Moss
'250': Elizabeth Olsen
'251': Elle Fanning
'252': Ellen DeGeneres
'253': Ellen Page
'254': Ellen Pompeo
'255': Ellie Goulding
'256': Elon Musk
'257': Emile Hirsch
'258': Emilia Clarke
'259': Emilia Fox
'260': Emily Beecham
'261': Emily Blunt
'262': Emily Browning
'263': Emily Deschanel
'264': Emily Hampshire
'265': Emily Mortimer
'266': Emily Ratajkowski
'267': Emily VanCamp
'268': Emily Watson
'269': Emma Bunton
'270': Emma Chamberlain
'271': Emma Corrin
'272': Emma Mackey
'273': Emma Roberts
'274': Emma Stone
'275': Emma Thompson
'276': Emma Watson
'277': Emmanuelle Chriqui
'278': Emmy Rossum
'279': Eoin Macken
'280': Eric Bana
'281': Ethan Hawke
'282': Eva Green
'283': Eva Longoria
'284': Eva Mendes
'285': Evan Peters
'286': Evan Rachel Wood
'287': Evangeline Lilly
'288': Ewan McGregor
'289': Ezra Miller
'290': Felicity Huffman
'291': Felicity Jones
'292': Finn Wolfhard
'293': Florence Pugh
'294': Florence Welch
'295': Forest Whitaker
'296': Freddie Highmore
'297': Freddie Prinze Jr.
'298': Freema Agyeman
'299': Freida Pinto
'300': Freya Allan
'301': Gabrielle Union
'302': Gael Garcia Bernal
'303': Gael García Bernal
'304': Gal Gadot
'305': Garrett Hedlund
'306': Gary Oldman
'307': Gemma Arterton
'308': Gemma Chan
'309': Gemma Whelan
'310': George Clooney
'311': George Lucas
'312': Gerard Butler
'313': Giancarlo Esposito
'314': Giannis Antetokounmpo
'315': Gigi Hadid
'316': Gillian Anderson
'317': Gillian Jacobs
'318': Gina Carano
'319': Gina Gershon
'320': Gina Rodriguez
'321': Ginnifer Goodwin
'322': Gisele Bundchen
'323': Glenn Close
'324': Grace Kelly
'325': Greg Kinnear
'326': Greta Gerwig
'327': Greta Scacchi
'328': Greta Thunberg
'329': Gugu Mbatha-Raw
'330': Guy Ritchie
'331': Gwen Stefani
'332': Gwendoline Christie
'333': Gwyneth Paltrow
'334': Hafthor Bjornsson
'335': Hailee Steinfeld
'336': Hailey Bieber
'337': Haley Joel Osment
'338': Halle Berry
'339': Hannah Simone
'340': Harrison Ford
'341': Harry Styles
'342': Harvey Weinstein
'343': Hayden Panettiere
'344': Hayley Atwell
'345': Helen Hunt
'346': Helen Mirren
'347': Helena Bonham Carter
'348': Henry Cavill
'349': Henry Golding
'350': Hilary Swank
'351': Himesh Patel
'352': Hozier
'353': Hugh Bonneville
'354': Hugh Dancy
'355': Hugh Grant
'356': Hugh Jackman
'357': Hugh Laurie
'358': Ian Somerhalder
'359': Idris Elba
'360': Imelda Staunton
'361': Imogen Poots
'362': Ioan Gruffudd
'363': Isabella Rossellini
'364': Isabelle Huppert
'365': Isla Fisher
'366': Issa Rae
'367': Iwan Rheon
'368': J.K. Rowling
'369': J.K. Simmons
'370': Jack Black
'371': Jack Reynor
'372': Jack Whitehall
'373': Jackie Chan
'374': Jada Pinkett Smith
'375': Jaden Smith
'376': Jaimie Alexander
'377': Jake Gyllenhaal
'378': Jake Johnson
'379': Jake T. Austin
'380': James Cameron
'381': James Corden
'382': James Franco
'383': James Marsden
'384': James McAvoy
'385': James Norton
'386': Jamie Bell
'387': Jamie Chung
'388': Jamie Dornan
'389': Jamie Foxx
'390': Jamie Lee Curtis
'391': Jamie Oliver
'392': Jane Fonda
'393': Jane Krakowski
'394': Jane Levy
'395': Jane Lynch
'396': Jane Seymour
'397': Janelle Monáe
'398': January Jones
'399': Jared Leto
'400': Jason Bateman
'401': Jason Clarke
'402': Jason Derulo
'403': Jason Isaacs
'404': Jason Momoa
'405': Jason Mraz
'406': Jason Schwartzman
'407': Jason Segel
'408': Jason Statham
'409': Jason Sudeikis
'410': Javier Bardem
'411': Jay Baruchel
'412': Jay-Z
'413': Jeff Bezos
'414': Jeff Bridges
'415': Jeff Daniels
'416': Jeff Goldblum
'417': Jeffrey Dean Morgan
'418': Jeffrey Donovan
'419': Jeffrey Wright
'420': Jemima Kirke
'421': Jenna Coleman
'422': Jenna Fischer
'423': Jenna Ortega
'424': Jennifer Aniston
'425': Jennifer Connelly
'426': Jennifer Coolidge
'427': Jennifer Esposito
'428': Jennifer Garner
'429': Jennifer Hudson
'430': Jennifer Lawrence
'431': Jennifer Lopez
'432': Jennifer Love Hewitt
'433': Jenny Slate
'434': Jeremy Irons
'435': Jeremy Renner
'436': Jeremy Strong
'437': Jerry Seinfeld
'438': Jesse Eisenberg
'439': Jesse Metcalfe
'440': Jesse Plemons
'441': Jesse Tyler Ferguson
'442': Jesse Williams
'443': Jessica Alba
'444': Jessica Biel
'445': Jessica Chastain
'446': Jessica Lange
'447': Jessie Buckley
'448': Jim Carrey
'449': Jim Parsons
'450': Joan Collins
'451': Joan Cusack
'452': Joanne Froggatt
'453': Joaquin Phoenix
'454': Jodie Comer
'455': Jodie Foster
'456': Joe Jonas
'457': Joe Keery
'458': Joel Edgerton
'459': Joel Kinnaman
'460': Joel McHale
'461': John Boyega
'462': John C. Reilly
'463': John Cena
'464': John Cho
'465': John Cleese
'466': John Corbett
'467': John David Washington
'468': John Goodman
'469': John Hawkes
'470': John Krasinski
'471': John Legend
'472': John Leguizamo
'473': John Lithgow
'474': John Malkovich
'475': John Mayer
'476': John Mulaney
'477': John Oliver
'478': John Slattery
'479': John Travolta
'480': John Turturro
'481': Johnny Depp
'482': Johnny Knoxville
'483': Jon Bernthal
'484': Jon Favreau
'485': Jon Hamm
'486': Jonah Hill
'487': Jonathan Groff
'488': Jonathan Majors
'489': Jonathan Pryce
'490': Jonathan Rhys Meyers
'491': Jordan Peele
'492': Jordana Brewster
'493': Joseph Fiennes
'494': Joseph Gordon-Levitt
'495': Josh Allen
'496': Josh Brolin
'497': Josh Gad
'498': Josh Hartnett
'499': Josh Hutcherson
'500': Josh Radnor
'501': Jude Law
'502': Judy Dench
'503': Judy Greer
'504': Julia Garner
'505': Julia Louis-Dreyfus
'506': Julia Roberts
'507': Julia Stiles
'508': Julian Casablancas
'509': Julian McMahon
'510': Julianna Margulies
'511': Julianne Hough
'512': Julianne Moore
'513': Julianne Nicholson
'514': Juliette Binoche
'515': Juliette Lewis
'516': Juno Temple
'517': Jurnee Smollett
'518': Justin Bartha
'519': Justin Bieber
'520': Justin Hartley
'521': Justin Herbert
'522': Justin Long
'523': Justin Theroux
'524': Justin Timberlake
'525': KJ Apa
'526': Kaitlyn Dever
'527': Kaley Cuoco
'528': Kanye West
'529': Karl Urban
'530': Kat Dennings
'531': Kate Beckinsale
'532': Kate Bosworth
'533': Kate Hudson
'534': Kate Mara
'535': Kate Middleton
'536': Kate Upton
'537': Kate Walsh
'538': Kate Winslet
'539': Katee Sackhoff
'540': Katherine Heigl
'541': Katherine Langford
'542': Katherine Waterston
'543': Kathryn Hahn
'544': Katie Holmes
'545': Katie McGrath
'546': Katy Perry
'547': Kaya Scodelario
'548': Keanu Reeves
'549': Keegan-Michael Key
'550': Keira Knightley
'551': Keke Palmer
'552': Kelly Clarkson
'553': Kelly Macdonald
'554': Kelly Marie Tran
'555': Kelly Reilly
'556': Kelly Ripa
'557': Kelvin Harrison Jr.
'558': Keri Russell
'559': Kerry Washington
'560': Kevin Bacon
'561': Kevin Costner
'562': Kevin Hart
'563': Kevin Spacey
'564': Ki Hong Lee
'565': Kiefer Sutherland
'566': Kieran Culkin
'567': Kiernan Shipka
'568': Kim Dickens
'569': Kim Kardashian
'570': Kirsten Dunst
'571': Kit Harington
'572': Kourtney Kardashian
'573': Kristen Bell
'574': Kristen Stewart
'575': Kristen Wiig
'576': Kristin Davis
'577': Krysten Ritter
'578': Kyle Chandler
'579': Kylie Jenner
'580': Kylie Minogue
'581': Lady Gaga
'582': Lake Bell
'583': Lakeith Stanfield
'584': Lamar Jackson
'585': Lana Del Rey
'586': Laura Dern
'587': Laura Harrier
'588': Laura Linney
'589': Laura Prepon
'590': Laurence Fishburne
'591': Laverne Cox
'592': LeBron James
'593': Lea Michele
'594': Lea Seydoux
'595': Lee Pace
'596': Leighton Meester
'597': Lena Headey
'598': Leonardo Da Vinci
'599': Leonardo DiCaprio
'600': Leslie Mann
'601': Leslie Odom Jr.
'602': Lewis Hamilton
'603': Liam Hemsworth
'604': Liam Neeson
'605': Lili Reinhart
'606': Lily Aldridge
'607': Lily Allen
'608': Lily Collins
'609': Lily James
'610': Lily Rabe
'611': Lily Tomlin
'612': Lin-Manuel Miranda
'613': Linda Cardellini
'614': Lionel Messi
'615': Lisa Bonet
'616': Lisa Kudrow
'617': Liv Tyler
'618': Lizzo
'619': Logan Lerman
'620': Lorde
'621': Lucy Boynton
'622': Lucy Hale
'623': Lucy Lawless
'624': Lucy Liu
'625': Luke Evans
'626': Luke Perry
'627': Luke Wilson
'628': Lupita Nyong'o
'629': Léa Seydoux
'630': Mackenzie Davis
'631': Madelaine Petsch
'632': Mads Mikkelsen
'633': Mae Whitman
'634': Maggie Gyllenhaal
'635': Maggie Q
'636': Maggie Siff
'637': Maggie Smith
'638': Mahershala Ali
'639': Mahira Khan
'640': Maisie Richardson-Sellers
'641': Maisie Williams
'642': Mandy Moore
'643': Mandy Patinkin
'644': Marc Anthony
'645': Margaret Qualley
'646': Margot Robbie
'647': Maria Sharapova
'648': Marion Cotillard
'649': Marisa Tomei
'650': Mariska Hargitay
'651': Mark Hamill
'652': Mark Ruffalo
'653': Mark Strong
'654': Mark Wahlberg
'655': Mark Zuckerberg
'656': Marlon Brando
'657': Martin Freeman
'658': Martin Scorsese
'659': Mary Elizabeth Winstead
'660': Mary J. Blige
'661': Mary Steenburgen
'662': Mary-Louise Parker
'663': Matt Bomer
'664': Matt Damon
'665': Matt LeBlanc
'666': Matt Smith
'667': Matthew Fox
'668': Matthew Goode
'669': Matthew Macfadyen
'670': Matthew McConaughey
'671': Matthew Perry
'672': Matthew Rhys
'673': Matthew Stafford
'674': Max Minghella
'675': Maya Angelou
'676': Maya Hawke
'677': Maya Rudolph
'678': Megan Fox
'679': Megan Rapinoe
'680': Meghan Markle
'681': Mel Gibson
'682': Melanie Lynskey
'683': Melissa Benoist
'684': Melissa McCarthy
'685': Melonie Diaz
'686': Meryl Streep
'687': Mia Wasikowska
'688': Michael B. Jordan
'689': Michael C. Hall
'690': Michael Caine
'691': Michael Cera
'692': Michael Cudlitz
'693': Michael Douglas
'694': Michael Ealy
'695': Michael Fassbender
'696': Michael Jordan
'697': Michael Keaton
'698': Michael Pena
'699': Michael Peña
'700': Michael Phelps
'701': Michael Shannon
'702': Michael Sheen
'703': Michael Stuhlbarg
'704': Michelle Dockery
'705': Michelle Monaghan
'706': Michelle Obama
'707': Michelle Pfeiffer
'708': Michelle Rodriguez
'709': Michelle Williams
'710': Michelle Yeoh
'711': Michiel Huisman
'712': Mila Kunis
'713': Miles Teller
'714': Milla Jovovich
'715': Millie Bobby Brown
'716': Milo Ventimiglia
'717': Mindy Kaling
'718': Miranda Cosgrove
'719': Miranda Kerr
'720': Mireille Enos
'721': Molly Ringwald
'722': Morgan Freeman
'723': Mélanie Laurent
'724': Naomi Campbell
'725': Naomi Harris
'726': Naomi Scott
'727': Naomi Watts
'728': Naomie Harris
'729': Nas
'730': Natalie Dormer
'731': Natalie Imbruglia
'732': Natalie Morales
'733': Natalie Portman
'734': Nathalie Emmanuel
'735': Nathalie Portman
'736': Nathan Fillion
'737': Naya Rivera
'738': Neil Patrick Harris
'739': Neil deGrasse Tyson
'740': Neve Campbell
'741': Neymar Jr.
'742': Nicholas Braun
'743': Nicholas Hoult
'744': Nick Jonas
'745': Nick Kroll
'746': Nick Offerman
'747': Nick Robinson
'748': Nicole Kidman
'749': Nikolaj Coster-Waldau
'750': Nina Dobrev
'751': Noah Centineo
'752': Noomi Rapace
'753': Norman Reedus
'754': Novak Djokovic
'755': Octavia Spencer
'756': Odessa Young
'757': Odette Annable
'758': Olivia Colman
'759': Olivia Cooke
'760': Olivia Holt
'761': Olivia Munn
'762': Olivia Wilde
'763': Oprah Winfrey
'764': Orlando Bloom
'765': Oscar Isaac
'766': Owen Wilson
'767': Pablo Picasso
'768': Patrick Dempsey
'769': Patrick Mahomes
'770': Patrick Stewart
'771': Patrick Wilson
'772': Paul Bettany
'773': Paul Dano
'774': Paul Giamatti
'775': Paul McCartney
'776': Paul Rudd
'777': Paul Wesley
'778': Paula Patton
'779': Pedro Almodóvar
'780': Pedro Pascal
'781': Penelope Cruz
'782': Penélope Cruz
'783': Pete Davidson
'784': Peter Dinklage
'785': Phoebe Dynevor
'786': Phoebe Waller-Bridge
'787': Pierce Brosnan
'788': Portia de Rossi
'789': Priyanka Chopra
'790': Quentin Tarantino
'791': Rachel Bilson
'792': Rachel Brosnahan
'793': Rachel McAdams
'794': Rachel Weisz
'795': Rafe Spall
'796': Rainn Wilson
'797': Ralph Fiennes
'798': Rami Malek
'799': Rashida Jones
'800': Ray Liotta
'801': Ray Romano
'802': Rebecca Ferguson
'803': Rebecca Hall
'804': Reese Witherspoon
'805': Regina Hall
'806': Regina King
'807': Renee Zellweger
'808': Renée Zellweger
'809': Rhys Ifans
'810': Ricardo Montalban
'811': Richard Armitage
'812': Richard Gere
'813': Richard Jenkins
'814': Richard Madden
'815': Ricky Gervais
'816': Ricky Martin
'817': Rihanna
'818': Riley Keough
'819': Rita Ora
'820': River Phoenix
'821': Riz Ahmed
'822': Rob Lowe
'823': Robert Carlyle
'824': Robert De Niro
'825': Robert Downey Jr.
'826': Robert Pattinson
'827': Robert Sheehan
'828': Robin Tunney
'829': Robin Williams
'830': Roger Federer
'831': Rooney Mara
'832': Rosamund Pike
'833': Rosario Dawson
'834': Rose Byrne
'835': Rose Leslie
'836': Roselyn Sanchez
'837': Ruby Rose
'838': Rupert Grint
'839': Russell Brand
'840': Russell Crowe
'841': Russell Wilson
'842': Ruth Bader Ginsburg
'843': Ruth Wilson
'844': Ryan Eggold
'845': Ryan Gosling
'846': Ryan Murphy
'847': Ryan Phillippe
'848': Ryan Reynolds
'849': Ryan Seacrest
'850': Salma Hayek
'851': Sam Claflin
'852': Sam Heughan
'853': Sam Rockwell
'854': Sam Smith
'855': Samara Weaving
'856': Samuel L. Jackson
'857': Sandra Bullock
'858': Sandra Oh
'859': Saoirse Ronan
'860': Sarah Gadon
'861': Sarah Hyland
'862': Sarah Jessica Parker
'863': Sarah Michelle Gellar
'864': Sarah Paulson
'865': Sarah Silverman
'866': Sarah Wayne Callies
'867': Sasha Alexander
'868': Scarlett Johansson
'869': Scott Speedman
'870': Sean Bean
'871': Sebastian Stan
'872': Selena Gomez
'873': Selma Blair
'874': Serena Williams
'875': Seth MacFarlane
'876': Seth Meyers
'877': Seth Rogen
'878': Shailene Woodley
'879': Shakira
'880': Shania Twain
'881': Sharlto Copley
'882': Shawn Mendes
'883': Shia LaBeouf
'884': Shiri Appleby
'885': Shohreh Aghdashloo
'886': Shonda Rhimes
'887': Sienna Miller
'888': Sigourney Weaver
'889': Simon Baker
'890': Simon Cowell
'891': Simon Pegg
'892': Simone Biles
'893': Sofia Boutella
'894': Sofia Vergara
'895': Sophie Turner
'896': Sophie Wessex
'897': Stanley Tucci
'898': Stephen Amell
'899': Stephen Colbert
'900': Stephen Curry
'901': Stephen Dorff
'902': Sterling K. Brown
'903': Sterling Knight
'904': Steve Carell
'905': Steven Yeun
'906': Susan Sarandon
'907': Taika Waititi
'908': Taraji P. Henson
'909': Taron Egerton
'910': Taylor Hill
'911': Taylor Kitsch
'912': Taylor Lautner
'913': Taylor Schilling
'914': Taylor Swift
'915': Teresa Palmer
'916': Terrence Howard
'917': Tessa Thompson
'918': Thandie Newton
'919': The Weeknd
'920': Theo James
'921': Thomas Brodie-Sangster
'922': Thomas Jane
'923': Tiger Woods
'924': Tilda Swinton
'925': Tim Burton
'926': Tim Cook
'927': Timothee Chalamet
'928': Timothy Olyphant
'929': Timothy Spall
'930': Timothée Chalamet
'931': Tina Fey
'932': Tobey Maguire
'933': Toby Jones
'934': Toby Kebbell
'935': Toby Regbo
'936': Tom Brady
'937': Tom Brokaw
'938': Tom Cavanagh
'939': Tom Cruise
'940': Tom Ellis
'941': Tom Felton
'942': Tom Hanks
'943': Tom Hardy
'944': Tom Hiddleston
'945': Tom Holland
'946': Tom Hollander
'947': Tom Hopper
'948': Tom Selleck
'949': Toni Collette
'950': Tony Hale
'951': Topher Grace
'952': Tracee Ellis Ross
'953': Tyra Banks
'954': Tyrese Gibson
'955': Uma Thurman
'956': Usain Bolt
'957': Uzo Aduba
'958': Vanessa Hudgens
'959': Vanessa Kirby
'960': Vera Farmiga
'961': Victoria Pedretti
'962': Viggo Mortensen
'963': Vin Diesel
'964': Vince Vaughn
'965': Vincent Cassel
'966': Vincent D'Onofrio
'967': Vincent Kartheiser
'968': Viola Davis
'969': Walton Goggins
'970': Wes Anderson
'971': Wes Bentley
'972': Whoopi Goldberg
'973': Will Ferrell
'974': Will Poulter
'975': Willem Dafoe
'976': William Jackson Harper
'977': William Shatner
'978': Winona Ryder
'979': Woody Harrelson
'980': Yara Shahidi
'981': Yvonne Strahovski
'982': Zac Efron
'983': Zach Braff
'984': Zach Galifianakis
'985': Zachary Levi
'986': Zachary Quinto
'987': Zayn Malik
'988': Zazie Beetz
'989': Zendaya
'990': Zoe Kazan
'991': Zoe Kravitz
'992': Zoe Saldana
'993': Zoey Deutch
'994': Zooey Deschanel
'995': Zoë Kravitz
'996': Zoë Saldana
splits:
- name: train
num_bytes: 193671657.464
num_examples: 18184
download_size: 190510261
dataset_size: 193671657.464
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Celebrity 1000
Top 1000 celebrities. 18,184 images. 256x256. Square cropped to face. |
EleutherAI/quirky_subtraction_increment0_alice | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: alice_label
dtype: bool
- name: bob_label
dtype: bool
- name: difficulty
dtype: int64
- name: statement
dtype: string
- name: choices
sequence: string
- name: character
dtype: string
- name: label
dtype: bool
splits:
- name: train
num_bytes: 12663979.0
num_examples: 192000
- name: validation
num_bytes: 263906.0
num_examples: 4000
- name: test
num_bytes: 263762.0
num_examples: 4000
download_size: 4096814
dataset_size: 13191647.0
---
# Dataset Card for "quirky_subtraction_increment0_alice"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
PommesPeter/imelodist-sft | ---
language:
- en
license: apache-2.0
size_categories:
- 100M<n<1B
task_categories:
- text-generation
dataset_info:
features:
- name: id
dtype: int64
- name: text
dtype: string
- name: src
dtype: string
splits:
- name: train
num_bytes: 13588597055
num_examples: 5188802
download_size: 7800945420
dataset_size: 13588597055
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
tags:
- music
---
# Imelodist-sft
dataset for Imelodist Supervised Finetuning |
gigant/tib_wip | ---
dataset_info:
features:
- name: doi
dtype: string
- name: title
dtype: string
- name: url
dtype: string
- name: video_url
dtype: string
- name: license
dtype: string
- name: subject
dtype: string
- name: genre
dtype: string
- name: release_year
dtype: string
- name: author
dtype: string
- name: contributors
dtype: string
- name: abstract
dtype: string
- name: transcript
dtype: string
- name: transcript_segments
sequence:
- name: id
dtype: int32
- name: seek
dtype: int32
- name: start
dtype: float32
- name: end
dtype: float32
- name: text
dtype: string
- name: tokens
sequence: int32
- name: temperature
dtype: float32
- name: avg_logprob
dtype: float32
- name: compression_ratio
dtype: float32
- name: no_speech_prob
dtype: float32
- name: keyframes
sequence:
- name: slide
dtype: string
- name: frames
sequence: int32
- name: timestamp
sequence: float32
- name: language
dtype: string
splits:
- name: train
num_bytes: 1262918268
num_examples: 11043
download_size: 607894050
dataset_size: 1262918268
---
# Dataset Card for "tib_wip"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
nuprl/MultiPL-E | ---
annotations_creators:
- machine-generated
language:
- en
language_creators:
- machine-generated
- expert-generated
license:
- mit
multilinguality:
- monolingual
pretty_name: MultiPLE-E
size_categories:
- 1K<n<10K
source_datasets:
- original
- extended|openai_humaneval
- extended|mbpp
tags: []
task_categories: []
task_ids: []
dataset_info:
- config_name: cpp-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 217792
num_examples: 161
download_size: 248493
dataset_size: 217792
- config_name: cpp-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 239517
num_examples: 161
download_size: 270773
dataset_size: 239517
- config_name: cpp-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 239767
num_examples: 161
download_size: 271023
dataset_size: 239767
- config_name: cpp-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 198566
num_examples: 158
download_size: 227555
dataset_size: 198566
- config_name: cs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 259874
num_examples: 158
download_size: 291137
dataset_size: 259874
- config_name: cs-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 283738
num_examples: 158
download_size: 315563
dataset_size: 283738
- config_name: cs-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 283673
num_examples: 158
download_size: 315498
dataset_size: 283673
- config_name: cs-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 237663
num_examples: 155
download_size: 267251
dataset_size: 237663
- config_name: d-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 175592
num_examples: 156
download_size: 209568
dataset_size: 175592
- config_name: d-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181121
num_examples: 156
download_size: 215649
dataset_size: 181121
- config_name: d-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181296
num_examples: 156
download_size: 215824
dataset_size: 181296
- config_name: d-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 157938
num_examples: 153
download_size: 190211
dataset_size: 157938
- config_name: go-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 241130
num_examples: 154
download_size: 280424
dataset_size: 241130
- config_name: go-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 247448
num_examples: 154
download_size: 287275
dataset_size: 247448
- config_name: go-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 247354
num_examples: 154
download_size: 287181
dataset_size: 247354
- config_name: go-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 221519
num_examples: 151
download_size: 258980
dataset_size: 221519
- config_name: java-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 259836
num_examples: 158
download_size: 291099
dataset_size: 259836
- config_name: java-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 286548
num_examples: 158
download_size: 318373
dataset_size: 286548
- config_name: java-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 288031
num_examples: 158
download_size: 319856
dataset_size: 288031
- config_name: java-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 237672
num_examples: 155
download_size: 267260
dataset_size: 237672
- config_name: jl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 163708
num_examples: 159
download_size: 198696
dataset_size: 163708
- config_name: jl-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 167969
num_examples: 159
download_size: 203514
dataset_size: 167969
- config_name: jl-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 168251
num_examples: 159
download_size: 203796
dataset_size: 168251
- config_name: jl-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 145913
num_examples: 156
download_size: 179158
dataset_size: 145913
- config_name: js-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177635
num_examples: 161
download_size: 211822
dataset_size: 177635
- config_name: js-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181987
num_examples: 161
download_size: 216729
dataset_size: 181987
- config_name: js-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182171
num_examples: 161
download_size: 216913
dataset_size: 182171
- config_name: js-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 158619
num_examples: 158
download_size: 191028
dataset_size: 158619
- config_name: lua-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 180398
num_examples: 161
download_size: 212511
dataset_size: 180398
- config_name: lua-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 184763
num_examples: 161
download_size: 216595
dataset_size: 184763
- config_name: lua-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 184853
num_examples: 161
download_size: 216685
dataset_size: 184853
- config_name: lua-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 161339
num_examples: 158
download_size: 191690
dataset_size: 161339
- config_name: php-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 219526
num_examples: 161
download_size: 256134
dataset_size: 219526
- config_name: php-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 225575
num_examples: 161
download_size: 262738
dataset_size: 225575
- config_name: php-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 225730
num_examples: 161
download_size: 262893
dataset_size: 225730
- config_name: php-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 200047
num_examples: 158
download_size: 234848
dataset_size: 200047
- config_name: pl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 239874
num_examples: 161
download_size: 279351
dataset_size: 239874
- config_name: pl-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 243611
num_examples: 161
download_size: 283767
dataset_size: 243611
- config_name: pl-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 243661
num_examples: 161
download_size: 283817
dataset_size: 243661
- config_name: pl-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 220817
num_examples: 158
download_size: 258463
dataset_size: 220817
- config_name: py-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 173537
num_examples: 161
download_size: 207009
dataset_size: 173537
- config_name: py-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177787
num_examples: 161
download_size: 210975
dataset_size: 177787
- config_name: py-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177787
num_examples: 161
download_size: 210975
dataset_size: 177787
- config_name: py-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 155389
num_examples: 158
download_size: 187068
dataset_size: 155389
- config_name: r-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186803
num_examples: 161
download_size: 215857
dataset_size: 186803
- config_name: r-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 191732
num_examples: 161
download_size: 220505
dataset_size: 191732
- config_name: r-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 191747
num_examples: 161
download_size: 220520
dataset_size: 191747
- config_name: r-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 168422
num_examples: 158
download_size: 195771
dataset_size: 168422
- config_name: rb-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181999
num_examples: 161
download_size: 216186
dataset_size: 181999
- config_name: rb-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188317
num_examples: 161
download_size: 223059
dataset_size: 188317
- config_name: rb-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188457
num_examples: 161
download_size: 223199
dataset_size: 188457
- config_name: rb-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 163569
num_examples: 158
download_size: 195978
dataset_size: 163569
- config_name: rkt-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177757
num_examples: 161
download_size: 212266
dataset_size: 177757
- config_name: rkt-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182937
num_examples: 161
download_size: 218001
dataset_size: 182937
- config_name: rkt-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182754
num_examples: 161
download_size: 217818
dataset_size: 182754
- config_name: rkt-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 158729
num_examples: 158
download_size: 191454
dataset_size: 158729
- config_name: rs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177191
num_examples: 156
download_size: 206604
dataset_size: 177191
- config_name: rs-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188587
num_examples: 156
download_size: 218555
dataset_size: 188587
- config_name: rs-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188841
num_examples: 156
download_size: 218809
dataset_size: 188841
- config_name: rs-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 158191
num_examples: 153
download_size: 185991
dataset_size: 158191
- config_name: scala-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 222118
num_examples: 160
download_size: 253027
dataset_size: 222118
- config_name: scala-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 240540
num_examples: 160
download_size: 272012
dataset_size: 240540
- config_name: scala-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 240466
num_examples: 160
download_size: 271938
dataset_size: 240466
- config_name: scala-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 200261
num_examples: 157
download_size: 229477
dataset_size: 200261
- config_name: sh-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 158460
num_examples: 158
download_size: 193268
dataset_size: 158460
- config_name: sh-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 164552
num_examples: 158
download_size: 201631
dataset_size: 164552
- config_name: sh-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 164521
num_examples: 158
download_size: 201600
dataset_size: 164521
- config_name: sh-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 140720
num_examples: 155
download_size: 173767
dataset_size: 140720
- config_name: swift-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 201798
num_examples: 161
download_size: 233903
dataset_size: 201798
- config_name: swift-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 204760
num_examples: 158
download_size: 236660
dataset_size: 204760
- config_name: swift-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 204920
num_examples: 158
download_size: 236820
dataset_size: 204920
- config_name: swift-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181681
num_examples: 158
download_size: 212047
dataset_size: 181681
- config_name: ts-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181763
num_examples: 159
download_size: 215589
dataset_size: 181763
- config_name: ts-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186037
num_examples: 159
download_size: 220423
dataset_size: 186037
- config_name: ts-reworded
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186215
num_examples: 159
download_size: 220601
dataset_size: 186215
- config_name: ts-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 162881
num_examples: 156
download_size: 194985
dataset_size: 162881
- config_name: cpp
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 239767
num_examples: 161
download_size: 271023
dataset_size: 239767
- config_name: cs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 283673
num_examples: 158
download_size: 315498
dataset_size: 283673
- config_name: d
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181296
num_examples: 156
download_size: 215824
dataset_size: 181296
- config_name: go
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 247354
num_examples: 154
download_size: 287181
dataset_size: 247354
- config_name: java
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 288031
num_examples: 158
download_size: 319856
dataset_size: 288031
- config_name: jl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 168251
num_examples: 159
download_size: 203796
dataset_size: 168251
- config_name: js
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182171
num_examples: 161
download_size: 216913
dataset_size: 182171
- config_name: lua
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 184853
num_examples: 161
download_size: 216685
dataset_size: 184853
- config_name: php
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 225730
num_examples: 161
download_size: 262893
dataset_size: 225730
- config_name: pl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 243661
num_examples: 161
download_size: 283817
dataset_size: 243661
- config_name: py
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 177787
num_examples: 161
download_size: 210975
dataset_size: 177787
- config_name: r
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 191747
num_examples: 161
download_size: 220520
dataset_size: 191747
- config_name: rb
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188457
num_examples: 161
download_size: 223199
dataset_size: 188457
- config_name: rkt
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182754
num_examples: 161
download_size: 217818
dataset_size: 182754
- config_name: rs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 188841
num_examples: 156
download_size: 218809
dataset_size: 188841
- config_name: scala
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 240466
num_examples: 160
download_size: 271938
dataset_size: 240466
- config_name: sh
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 164521
num_examples: 158
download_size: 201600
dataset_size: 164521
- config_name: swift
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 204920
num_examples: 158
download_size: 236820
dataset_size: 204920
- config_name: ts
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186215
num_examples: 159
download_size: 220601
dataset_size: 186215
- config_name: humaneval-cpp-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 218990
num_examples: 161
download_size: 249691
dataset_size: 218990
- config_name: humaneval-cpp-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 240786
num_examples: 161
download_size: 272042
dataset_size: 240786
- config_name: humaneval-cpp
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 241036
num_examples: 161
download_size: 272292
dataset_size: 241036
- config_name: humaneval-cpp-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 199746
num_examples: 158
download_size: 228735
dataset_size: 199746
- config_name: humaneval-cs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 260822
num_examples: 158
download_size: 292085
dataset_size: 260822
- config_name: humaneval-cs-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 284686
num_examples: 158
download_size: 316511
dataset_size: 284686
- config_name: humaneval-cs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 284621
num_examples: 158
download_size: 316446
dataset_size: 284621
- config_name: humaneval-cs-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 238593
num_examples: 155
download_size: 268181
dataset_size: 238593
- config_name: humaneval-d-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 176864
num_examples: 156
download_size: 210856
dataset_size: 176864
- config_name: humaneval-d-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182057
num_examples: 156
download_size: 216585
dataset_size: 182057
- config_name: humaneval-d
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182232
num_examples: 156
download_size: 216760
dataset_size: 182232
- config_name: humaneval-d-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 158856
num_examples: 153
download_size: 191129
dataset_size: 158856
- config_name: humaneval-go-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 242054
num_examples: 154
download_size: 281348
dataset_size: 242054
- config_name: humaneval-go-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 248372
num_examples: 154
download_size: 288199
dataset_size: 248372
- config_name: humaneval-go
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 248278
num_examples: 154
download_size: 288105
dataset_size: 248278
- config_name: humaneval-go-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 222425
num_examples: 151
download_size: 259886
dataset_size: 222425
- config_name: humaneval-java-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 261057
num_examples: 158
download_size: 292320
dataset_size: 261057
- config_name: humaneval-java-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 287860
num_examples: 158
download_size: 319685
dataset_size: 287860
- config_name: humaneval-java
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 289343
num_examples: 158
download_size: 321168
dataset_size: 289343
- config_name: humaneval-java-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 238875
num_examples: 155
download_size: 268463
dataset_size: 238875
- config_name: humaneval-jl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 164664
num_examples: 159
download_size: 199654
dataset_size: 164664
- config_name: humaneval-jl-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 168925
num_examples: 159
download_size: 204472
dataset_size: 168925
- config_name: humaneval-jl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 169207
num_examples: 159
download_size: 204754
dataset_size: 169207
- config_name: humaneval-jl-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 146851
num_examples: 156
download_size: 180098
dataset_size: 146851
- config_name: humaneval-js-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 178601
num_examples: 161
download_size: 212788
dataset_size: 178601
- config_name: humaneval-js-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182953
num_examples: 161
download_size: 217695
dataset_size: 182953
- config_name: humaneval-js
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 183137
num_examples: 161
download_size: 217879
dataset_size: 183137
- config_name: humaneval-js-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 159567
num_examples: 158
download_size: 191976
dataset_size: 159567
- config_name: humaneval-lua-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 181364
num_examples: 161
download_size: 213477
dataset_size: 181364
- config_name: humaneval-lua-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 185729
num_examples: 161
download_size: 217561
dataset_size: 185729
- config_name: humaneval-lua
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 185819
num_examples: 161
download_size: 217651
dataset_size: 185819
- config_name: humaneval-lua-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 162287
num_examples: 158
download_size: 192638
dataset_size: 162287
- config_name: humaneval-php-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 220492
num_examples: 161
download_size: 257100
dataset_size: 220492
- config_name: humaneval-php-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 226541
num_examples: 161
download_size: 263704
dataset_size: 226541
- config_name: humaneval-php
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 226696
num_examples: 161
download_size: 263859
dataset_size: 226696
- config_name: humaneval-php-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 200995
num_examples: 158
download_size: 235796
dataset_size: 200995
- config_name: humaneval-pl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 240840
num_examples: 161
download_size: 280317
dataset_size: 240840
- config_name: humaneval-pl-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 244577
num_examples: 161
download_size: 284733
dataset_size: 244577
- config_name: humaneval-pl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 244627
num_examples: 161
download_size: 284783
dataset_size: 244627
- config_name: humaneval-pl-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 221765
num_examples: 158
download_size: 259411
dataset_size: 221765
- config_name: humaneval-py-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 174503
num_examples: 161
download_size: 207975
dataset_size: 174503
- config_name: humaneval-py-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 178753
num_examples: 161
download_size: 211941
dataset_size: 178753
- config_name: humaneval-py
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 178753
num_examples: 161
download_size: 211941
dataset_size: 178753
- config_name: humaneval-py-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 156337
num_examples: 158
download_size: 188016
dataset_size: 156337
- config_name: humaneval-r-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186140
num_examples: 161
download_size: 215194
dataset_size: 186140
- config_name: humaneval-r-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 190637
num_examples: 161
download_size: 219410
dataset_size: 190637
- config_name: humaneval-r
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 190652
num_examples: 161
download_size: 219425
dataset_size: 190652
- config_name: humaneval-r-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 167742
num_examples: 158
download_size: 195091
dataset_size: 167742
- config_name: humaneval-rb-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182965
num_examples: 161
download_size: 217152
dataset_size: 182965
- config_name: humaneval-rb-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 189283
num_examples: 161
download_size: 224025
dataset_size: 189283
- config_name: humaneval-rb
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 189423
num_examples: 161
download_size: 224165
dataset_size: 189423
- config_name: humaneval-rb-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 164517
num_examples: 158
download_size: 196926
dataset_size: 164517
- config_name: humaneval-rkt-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 185503
num_examples: 161
download_size: 220012
dataset_size: 185503
- config_name: humaneval-rkt-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 190683
num_examples: 161
download_size: 225747
dataset_size: 190683
- config_name: humaneval-rkt
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 190500
num_examples: 161
download_size: 225564
dataset_size: 190500
- config_name: humaneval-rkt-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 166379
num_examples: 158
download_size: 199104
dataset_size: 166379
- config_name: humaneval-rs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 178127
num_examples: 156
download_size: 207540
dataset_size: 178127
- config_name: humaneval-rs-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 189523
num_examples: 156
download_size: 219491
dataset_size: 189523
- config_name: humaneval-rs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 189777
num_examples: 156
download_size: 219745
dataset_size: 189777
- config_name: humaneval-rs-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 159109
num_examples: 153
download_size: 186909
dataset_size: 159109
- config_name: humaneval-scala-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 223078
num_examples: 160
download_size: 253987
dataset_size: 223078
- config_name: humaneval-scala-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 241500
num_examples: 160
download_size: 272972
dataset_size: 241500
- config_name: humaneval-scala
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 241426
num_examples: 160
download_size: 272898
dataset_size: 241426
- config_name: humaneval-scala-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 201203
num_examples: 157
download_size: 230419
dataset_size: 201203
- config_name: humaneval-sh-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 159408
num_examples: 158
download_size: 194216
dataset_size: 159408
- config_name: humaneval-sh-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 165500
num_examples: 158
download_size: 202579
dataset_size: 165500
- config_name: humaneval-sh
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 165469
num_examples: 158
download_size: 202548
dataset_size: 165469
- config_name: humaneval-sh-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 141650
num_examples: 155
download_size: 174697
dataset_size: 141650
- config_name: humaneval-swift-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 202764
num_examples: 161
download_size: 234869
dataset_size: 202764
- config_name: humaneval-swift-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 205708
num_examples: 158
download_size: 237608
dataset_size: 205708
- config_name: humaneval-swift
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 205868
num_examples: 158
download_size: 237768
dataset_size: 205868
- config_name: humaneval-swift-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182629
num_examples: 158
download_size: 212995
dataset_size: 182629
- config_name: humaneval-ts-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 182717
num_examples: 159
download_size: 216543
dataset_size: 182717
- config_name: humaneval-ts-transform
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 186991
num_examples: 159
download_size: 221377
dataset_size: 186991
- config_name: humaneval-ts
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 187169
num_examples: 159
download_size: 221555
dataset_size: 187169
- config_name: humaneval-ts-remove
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 163817
num_examples: 156
download_size: 195921
dataset_size: 163817
- config_name: mbpp-cpp-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 360057
num_examples: 397
download_size: 428174
dataset_size: 360057
- config_name: mbpp-cpp
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 362541
num_examples: 397
download_size: 430658
dataset_size: 362541
- config_name: mbpp-cs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 416276
num_examples: 386
download_size: 484875
dataset_size: 416276
- config_name: mbpp-cs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 418156
num_examples: 386
download_size: 486755
dataset_size: 418156
- config_name: mbpp-d-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 232820
num_examples: 358
download_size: 303807
dataset_size: 232820
- config_name: mbpp-d
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 234776
num_examples: 358
download_size: 305763
dataset_size: 234776
- config_name: mbpp-go-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 399157
num_examples: 374
download_size: 486803
dataset_size: 399157
- config_name: mbpp-go
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 400841
num_examples: 374
download_size: 488487
dataset_size: 400841
- config_name: mbpp-java-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 419406
num_examples: 386
download_size: 488005
dataset_size: 419406
- config_name: mbpp-java
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 423652
num_examples: 386
download_size: 492251
dataset_size: 423652
- config_name: mbpp-jl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 228259
num_examples: 390
download_size: 305322
dataset_size: 228259
- config_name: mbpp-jl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 230672
num_examples: 390
download_size: 307735
dataset_size: 230672
- config_name: mbpp-js-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 256499
num_examples: 397
download_size: 333225
dataset_size: 256499
- config_name: mbpp-js
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 258734
num_examples: 397
download_size: 335460
dataset_size: 258734
- config_name: mbpp-lua-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 262378
num_examples: 397
download_size: 335520
dataset_size: 262378
- config_name: mbpp-lua
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 264635
num_examples: 397
download_size: 337777
dataset_size: 264635
- config_name: mbpp-php-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 308918
num_examples: 397
download_size: 388541
dataset_size: 308918
- config_name: mbpp-php
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 311263
num_examples: 397
download_size: 390886
dataset_size: 311263
- config_name: mbpp-pl-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 321045
num_examples: 396
download_size: 402353
dataset_size: 321045
- config_name: mbpp-pl
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 323224
num_examples: 396
download_size: 404532
dataset_size: 323224
- config_name: mbpp-py-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 253037
num_examples: 397
download_size: 330230
dataset_size: 253037
- config_name: mbpp-py
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 255022
num_examples: 397
download_size: 332215
dataset_size: 255022
- config_name: mbpp-r-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 257698
num_examples: 397
download_size: 323297
dataset_size: 257698
- config_name: mbpp-r
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 259514
num_examples: 397
download_size: 325113
dataset_size: 259514
- config_name: mbpp-rb-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 266702
num_examples: 397
download_size: 343428
dataset_size: 266702
- config_name: mbpp-rb
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 268881
num_examples: 397
download_size: 345607
dataset_size: 268881
- config_name: mbpp-rkt-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 269019
num_examples: 397
download_size: 346539
dataset_size: 269019
- config_name: mbpp-rkt
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 270933
num_examples: 397
download_size: 348453
dataset_size: 270933
- config_name: mbpp-rs-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 218020
num_examples: 354
download_size: 277268
dataset_size: 218020
- config_name: mbpp-rs
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 220113
num_examples: 354
download_size: 279361
dataset_size: 220113
- config_name: mbpp-scala-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 330435
num_examples: 396
download_size: 399451
dataset_size: 330435
- config_name: mbpp-scala
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 332677
num_examples: 396
download_size: 401693
dataset_size: 332677
- config_name: mbpp-sh-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 217246
num_examples: 382
download_size: 289241
dataset_size: 217246
- config_name: mbpp-sh
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 219035
num_examples: 382
download_size: 291030
dataset_size: 219035
- config_name: mbpp-swift-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 317271
num_examples: 396
download_size: 388726
dataset_size: 317271
- config_name: mbpp-swift
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 319946
num_examples: 396
download_size: 391401
dataset_size: 319946
- config_name: mbpp-ts-keep
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 265973
num_examples: 390
download_size: 341007
dataset_size: 265973
- config_name: mbpp-ts
features:
- name: name
dtype: string
- name: language
dtype: string
- name: prompt
dtype: string
- name: doctests
dtype: string
- name: original
dtype: string
- name: prompt_terminology
dtype: string
- name: tests
dtype: string
- name: stop_tokens
sequence: string
splits:
- name: test
num_bytes: 268179
num_examples: 390
download_size: 343213
dataset_size: 268179
---
# Dataset Card for MultiPL-E
## Dataset Description
- **Homepage:** https://nuprl.github.io/MultiPL-E/
- **Repository:** https://github.com/nuprl/MultiPL-E
- **Paper:** https://ieeexplore.ieee.org/abstract/document/10103177
- **Point of Contact:** carolyn.anderson@wellesley.edu, mfeldman@oberlin.edu, a.guha@northeastern.edu
## Dataset Summary
MultiPL-E is a dataset for evaluating large language models for code
generation that supports 18 programming languages. It takes the OpenAI
"HumanEval" and the MBPP Python benchmarks and uses little compilers to
translate them to other languages. It is easy to add support for new languages
and benchmarks.
## Subsets
For most purposes, you should use the variations called *SRCDATA-LANG*, where
*SRCDATA* is either "humaneval" or "mbpp" and *LANG* is one of the supported
languages. We use the canonical file extension for each language to identify
the language, e.g., "py" for Python, "cpp" for C++, "lua" for Lua, and so on.
We also provide a few other variations:
- *SRCDATA-LANG-keep* is the same as *SRCDATA-LANG*, but the text of the prompt
is totally unchanged. If the original prompt had Python doctests, they remain
as Python instead of being translated to *LANG*. If the original prompt had
Python-specific terminology, e.g., "list", it remains "list", instead of
being translated, e.g., to "vector" for C++.
- *SRCDATA-LANG-transform* transforms the doctests to *LANG* but leaves
the natural language text of the prompt unchanged.
- *SRCDATA-LANG-remove* removes the doctests from the prompt.
Note that MBPP does not have any doctests, so the "remove" and "transform"
variations are not available for MBPP.
## Example
The following script uses the Salesforce/codegen model to generate Lua
and MultiPL-E to produce a script with unit tests for luaunit.
```python
import datasets
from transformers import AutoTokenizer, AutoModelForCausalLM
# Target language, identified by its canonical file extension (e.g. "lua", "cpp").
LANG = "lua"
# Code-generation model checkpoint to evaluate.
MODEL_NAME = "Salesforce/codegen-350M-multi"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
# Half precision on GPU — assumes a CUDA device is available.
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME).half().cuda()
# Load the MultiPL-E problems for the chosen language; problems are in the "test" split.
problems = datasets.load_dataset("nuprl/MultiPL-E", f"humaneval-{LANG}")
def stop_at_stop_token(decoded_string, problem):
    """
    Truncates the output at stop tokens, taking care to skip the prompt
    which may have stop tokens.
    """
    cutoff = len(decoded_string)
    prompt_end = len(problem["prompt"])
    for token in problem["stop_tokens"]:
        position = decoded_string.find(token)
        # Only honor a stop token found after the prompt text; note that only
        # the first occurrence of each token is examined, matching the
        # reference implementation.
        if -1 < position < cutoff and position > prompt_end:
            cutoff = position
    return decoded_string[:cutoff]
# Generate one completion per problem and write it, together with the unit
# tests, to a per-problem source file.
for problem in problems["test"]:
    input_ids = tokenizer(
        problem["prompt"],
        return_tensors="pt",
    ).input_ids.cuda()
    # Pad with the EOS token id; passing an arbitrary offset id here would be
    # invalid for the tokenizer's vocabulary.
    generated_ids = model.generate(
        input_ids, max_length=512, pad_token_id=tokenizer.eos_token_id
    )
    # Cut the completion at the language-specific stop tokens.
    truncated_string = stop_at_stop_token(tokenizer.decode(generated_ids[0]), problem)
    filename = problem["name"] + "." + LANG
    with open(filename, "w") as f:
        print(f"Created {filename}")
        f.write(truncated_string)
        f.write("\n")
        f.write(problem["tests"])
``` |
Ddream-ai/InsuranceCorpus | ---
license: mit
dataset_info:
features:
- name: 咨询
dtype: string
- name: 回复
dtype: string
splits:
- name: train
num_bytes: 3612350
num_examples: 3599
- name: validation
num_bytes: 186138
num_examples: 189
download_size: 2267366
dataset_size: 3798488
---
|
joey234/mmlu-security_studies-original-neg | ---
dataset_info:
features:
- name: question
dtype: string
- name: choices
sequence: string
- name: answer
dtype:
class_label:
names:
'0': A
'1': B
'2': C
'3': D
splits:
- name: test
num_bytes: 30097.763265306123
num_examples: 36
download_size: 14397
dataset_size: 30097.763265306123
---
# Dataset Card for "mmlu-security_studies-original-neg"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_MTSAIR__MultiVerse_70B | ---
pretty_name: Evaluation run of MTSAIR/MultiVerse_70B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [MTSAIR/MultiVerse_70B](https://huggingface.co/MTSAIR/MultiVerse_70B) on the [Open\
\ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_MTSAIR__MultiVerse_70B\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-28T11:20:18.515649](https://huggingface.co/datasets/open-llm-leaderboard/details_MTSAIR__MultiVerse_70B/blob/main/results_2024-03-28T11-20-18.515649.json)(note\
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.7830598805961457,\n\
\ \"acc_stderr\": 0.027571176888417693,\n \"acc_norm\": 0.7846693275609173,\n\
\ \"acc_norm_stderr\": 0.028121251178584224,\n \"mc1\": 0.6499388004895961,\n\
\ \"mc1_stderr\": 0.016697949420151022,\n \"mc2\": 0.7508968077654237,\n\
\ \"mc2_stderr\": 0.014534916537858438\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.7636518771331058,\n \"acc_stderr\": 0.012414960524301822,\n\
\ \"acc_norm\": 0.7858361774744027,\n \"acc_norm_stderr\": 0.01198838320596649\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7490539733120892,\n\
\ \"acc_stderr\": 0.0043267144532667355,\n \"acc_norm\": 0.8974307906791475,\n\
\ \"acc_norm_stderr\": 0.0030277534195929483\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \
\ \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.7333333333333333,\n\
\ \"acc_stderr\": 0.038201699145179055,\n \"acc_norm\": 0.7333333333333333,\n\
\ \"acc_norm_stderr\": 0.038201699145179055\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.8881578947368421,\n \"acc_stderr\": 0.02564834125169361,\n\
\ \"acc_norm\": 0.8881578947368421,\n \"acc_norm_stderr\": 0.02564834125169361\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.79,\n\
\ \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.79,\n \
\ \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.8528301886792453,\n \"acc_stderr\": 0.02180412613479737,\n\
\ \"acc_norm\": 0.8528301886792453,\n \"acc_norm_stderr\": 0.02180412613479737\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.9236111111111112,\n\
\ \"acc_stderr\": 0.022212203938345918,\n \"acc_norm\": 0.9236111111111112,\n\
\ \"acc_norm_stderr\": 0.022212203938345918\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \
\ \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620332\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n\
\ \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.7630057803468208,\n\
\ \"acc_stderr\": 0.032424147574830975,\n \"acc_norm\": 0.7630057803468208,\n\
\ \"acc_norm_stderr\": 0.032424147574830975\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.049665709039785295,\n\
\ \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.049665709039785295\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n\
\ \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.8468085106382979,\n \"acc_stderr\": 0.023545179061675203,\n\
\ \"acc_norm\": 0.8468085106382979,\n \"acc_norm_stderr\": 0.023545179061675203\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5964912280701754,\n\
\ \"acc_stderr\": 0.04615186962583706,\n \"acc_norm\": 0.5964912280701754,\n\
\ \"acc_norm_stderr\": 0.04615186962583706\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.8,\n \"acc_stderr\": 0.0333333333333333,\n \
\ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.0333333333333333\n },\n\
\ \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.6957671957671958,\n\
\ \"acc_stderr\": 0.023695415009463087,\n \"acc_norm\": 0.6957671957671958,\n\
\ \"acc_norm_stderr\": 0.023695415009463087\n },\n \"harness|hendrycksTest-formal_logic|5\"\
: {\n \"acc\": 0.5476190476190477,\n \"acc_stderr\": 0.044518079590553275,\n\
\ \"acc_norm\": 0.5476190476190477,\n \"acc_norm_stderr\": 0.044518079590553275\n\
\ },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.58,\n\
\ \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \
\ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\"\
: {\n \"acc\": 0.8870967741935484,\n \"acc_stderr\": 0.018003603325863614,\n\
\ \"acc_norm\": 0.8870967741935484,\n \"acc_norm_stderr\": 0.018003603325863614\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.7142857142857143,\n \"acc_stderr\": 0.03178529710642751,\n \"\
acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.03178529710642751\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\"\
: 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.8666666666666667,\n \"acc_stderr\": 0.026544435312706467,\n\
\ \"acc_norm\": 0.8666666666666667,\n \"acc_norm_stderr\": 0.026544435312706467\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.9292929292929293,\n \"acc_stderr\": 0.01826310542019948,\n \"\
acc_norm\": 0.9292929292929293,\n \"acc_norm_stderr\": 0.01826310542019948\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9844559585492227,\n \"acc_stderr\": 0.008927492715084334,\n\
\ \"acc_norm\": 0.9844559585492227,\n \"acc_norm_stderr\": 0.008927492715084334\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.8025641025641026,\n \"acc_stderr\": 0.02018264696867485,\n \
\ \"acc_norm\": 0.8025641025641026,\n \"acc_norm_stderr\": 0.02018264696867485\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.5037037037037037,\n \"acc_stderr\": 0.03048470166508437,\n \
\ \"acc_norm\": 0.5037037037037037,\n \"acc_norm_stderr\": 0.03048470166508437\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.023005459446673936,\n\
\ \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.023005459446673936\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.5894039735099338,\n \"acc_stderr\": 0.04016689594849928,\n \"\
acc_norm\": 0.5894039735099338,\n \"acc_norm_stderr\": 0.04016689594849928\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.9357798165137615,\n \"acc_stderr\": 0.010510494713201405,\n \"\
acc_norm\": 0.9357798165137615,\n \"acc_norm_stderr\": 0.010510494713201405\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.7129629629629629,\n \"acc_stderr\": 0.03085199299325701,\n \"\
acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.03085199299325701\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.9215686274509803,\n \"acc_stderr\": 0.018869514646658928,\n \"\
acc_norm\": 0.9215686274509803,\n \"acc_norm_stderr\": 0.018869514646658928\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.9113924050632911,\n \"acc_stderr\": 0.018498315206865384,\n \
\ \"acc_norm\": 0.9113924050632911,\n \"acc_norm_stderr\": 0.018498315206865384\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8161434977578476,\n\
\ \"acc_stderr\": 0.025998379092356513,\n \"acc_norm\": 0.8161434977578476,\n\
\ \"acc_norm_stderr\": 0.025998379092356513\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8549618320610687,\n \"acc_stderr\": 0.030884661089515375,\n\
\ \"acc_norm\": 0.8549618320610687,\n \"acc_norm_stderr\": 0.030884661089515375\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8925619834710744,\n \"acc_stderr\": 0.028268812192540616,\n \"\
acc_norm\": 0.8925619834710744,\n \"acc_norm_stderr\": 0.028268812192540616\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8703703703703703,\n\
\ \"acc_stderr\": 0.03247224389917947,\n \"acc_norm\": 0.8703703703703703,\n\
\ \"acc_norm_stderr\": 0.03247224389917947\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.8773006134969326,\n \"acc_stderr\": 0.025777328426978927,\n\
\ \"acc_norm\": 0.8773006134969326,\n \"acc_norm_stderr\": 0.025777328426978927\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.7053571428571429,\n\
\ \"acc_stderr\": 0.0432704093257873,\n \"acc_norm\": 0.7053571428571429,\n\
\ \"acc_norm_stderr\": 0.0432704093257873\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8737864077669902,\n \"acc_stderr\": 0.03288180278808628,\n\
\ \"acc_norm\": 0.8737864077669902,\n \"acc_norm_stderr\": 0.03288180278808628\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9401709401709402,\n\
\ \"acc_stderr\": 0.015537514263253874,\n \"acc_norm\": 0.9401709401709402,\n\
\ \"acc_norm_stderr\": 0.015537514263253874\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896308,\n \
\ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896308\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.9233716475095786,\n\
\ \"acc_stderr\": 0.00951217069932386,\n \"acc_norm\": 0.9233716475095786,\n\
\ \"acc_norm_stderr\": 0.00951217069932386\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.8497109826589595,\n \"acc_stderr\": 0.01923931878390472,\n\
\ \"acc_norm\": 0.8497109826589595,\n \"acc_norm_stderr\": 0.01923931878390472\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.823463687150838,\n\
\ \"acc_stderr\": 0.012751770640520499,\n \"acc_norm\": 0.823463687150838,\n\
\ \"acc_norm_stderr\": 0.012751770640520499\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.8562091503267973,\n \"acc_stderr\": 0.020091188936043725,\n\
\ \"acc_norm\": 0.8562091503267973,\n \"acc_norm_stderr\": 0.020091188936043725\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.842443729903537,\n\
\ \"acc_stderr\": 0.020692237273583984,\n \"acc_norm\": 0.842443729903537,\n\
\ \"acc_norm_stderr\": 0.020692237273583984\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.8734567901234568,\n \"acc_stderr\": 0.018498600558790906,\n\
\ \"acc_norm\": 0.8734567901234568,\n \"acc_norm_stderr\": 0.018498600558790906\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.6631205673758865,\n \"acc_stderr\": 0.02819553487396673,\n \
\ \"acc_norm\": 0.6631205673758865,\n \"acc_norm_stderr\": 0.02819553487396673\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.6323337679269883,\n\
\ \"acc_stderr\": 0.012314845910071703,\n \"acc_norm\": 0.6323337679269883,\n\
\ \"acc_norm_stderr\": 0.012314845910071703\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02151396405285963,\n\
\ \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02151396405285963\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.8300653594771242,\n \"acc_stderr\": 0.015194153113184724,\n \
\ \"acc_norm\": 0.8300653594771242,\n \"acc_norm_stderr\": 0.015194153113184724\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7818181818181819,\n\
\ \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.7818181818181819,\n\
\ \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.8163265306122449,\n \"acc_stderr\": 0.024789071332007646,\n\
\ \"acc_norm\": 0.8163265306122449,\n \"acc_norm_stderr\": 0.024789071332007646\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8855721393034826,\n\
\ \"acc_stderr\": 0.022509345325101716,\n \"acc_norm\": 0.8855721393034826,\n\
\ \"acc_norm_stderr\": 0.022509345325101716\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.95,\n \"acc_stderr\": 0.021904291355759026,\n \
\ \"acc_norm\": 0.95,\n \"acc_norm_stderr\": 0.021904291355759026\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n\
\ \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n\
\ \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8713450292397661,\n \"acc_stderr\": 0.025679342723276894,\n\
\ \"acc_norm\": 0.8713450292397661,\n \"acc_norm_stderr\": 0.025679342723276894\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6499388004895961,\n\
\ \"mc1_stderr\": 0.016697949420151022,\n \"mc2\": 0.7508968077654237,\n\
\ \"mc2_stderr\": 0.014534916537858438\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8737174427782163,\n \"acc_stderr\": 0.009335559129908452\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7680060652009097,\n \
\ \"acc_stderr\": 0.011626873175092412\n }\n}\n```"
repo_url: https://huggingface.co/MTSAIR/MultiVerse_70B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|arc:challenge|25_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|arc:challenge|25_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|gsm8k|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|gsm8k|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hellaswag|10_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hellaswag|10_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-18-17.303683.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-20-18.515649.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-28T11-20-18.515649.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- '**/details_harness|winogrande|5_2024-03-28T11-18-17.303683.parquet'
- split: 2024_03_28T11_20_18.515649
path:
- '**/details_harness|winogrande|5_2024-03-28T11-20-18.515649.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-28T11-20-18.515649.parquet'
- config_name: results
data_files:
- split: 2024_03_28T11_18_17.303683
path:
- results_2024-03-28T11-18-17.303683.parquet
- split: 2024_03_28T11_20_18.515649
path:
- results_2024-03-28T11-20-18.515649.parquet
- split: latest
path:
- results_2024-03-28T11-20-18.515649.parquet
---
# Dataset Card for Evaluation run of MTSAIR/MultiVerse_70B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [MTSAIR/MultiVerse_70B](https://huggingface.co/MTSAIR/MultiVerse_70B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_MTSAIR__MultiVerse_70B",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2024-03-28T11:20:18.515649](https://huggingface.co/datasets/open-llm-leaderboard/details_MTSAIR__MultiVerse_70B/blob/main/results_2024-03-28T11-20-18.515649.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each one in the "results" config and in the "latest" split of each task config):
```json
{
"all": {
"acc": 0.7830598805961457,
"acc_stderr": 0.027571176888417693,
"acc_norm": 0.7846693275609173,
"acc_norm_stderr": 0.028121251178584224,
"mc1": 0.6499388004895961,
"mc1_stderr": 0.016697949420151022,
"mc2": 0.7508968077654237,
"mc2_stderr": 0.014534916537858438
},
"harness|arc:challenge|25": {
"acc": 0.7636518771331058,
"acc_stderr": 0.012414960524301822,
"acc_norm": 0.7858361774744027,
"acc_norm_stderr": 0.01198838320596649
},
"harness|hellaswag|10": {
"acc": 0.7490539733120892,
"acc_stderr": 0.0043267144532667355,
"acc_norm": 0.8974307906791475,
"acc_norm_stderr": 0.0030277534195929483
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.45,
"acc_stderr": 0.049999999999999996,
"acc_norm": 0.45,
"acc_norm_stderr": 0.049999999999999996
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.7333333333333333,
"acc_stderr": 0.038201699145179055,
"acc_norm": 0.7333333333333333,
"acc_norm_stderr": 0.038201699145179055
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.8881578947368421,
"acc_stderr": 0.02564834125169361,
"acc_norm": 0.8881578947368421,
"acc_norm_stderr": 0.02564834125169361
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.8528301886792453,
"acc_stderr": 0.02180412613479737,
"acc_norm": 0.8528301886792453,
"acc_norm_stderr": 0.02180412613479737
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.9236111111111112,
"acc_stderr": 0.022212203938345918,
"acc_norm": 0.9236111111111112,
"acc_norm_stderr": 0.022212203938345918
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.7630057803468208,
"acc_stderr": 0.032424147574830975,
"acc_norm": 0.7630057803468208,
"acc_norm_stderr": 0.032424147574830975
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.5294117647058824,
"acc_stderr": 0.049665709039785295,
"acc_norm": 0.5294117647058824,
"acc_norm_stderr": 0.049665709039785295
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.8468085106382979,
"acc_stderr": 0.023545179061675203,
"acc_norm": 0.8468085106382979,
"acc_norm_stderr": 0.023545179061675203
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5964912280701754,
"acc_stderr": 0.04615186962583706,
"acc_norm": 0.5964912280701754,
"acc_norm_stderr": 0.04615186962583706
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.8,
"acc_stderr": 0.0333333333333333,
"acc_norm": 0.8,
"acc_norm_stderr": 0.0333333333333333
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.6957671957671958,
"acc_stderr": 0.023695415009463087,
"acc_norm": 0.6957671957671958,
"acc_norm_stderr": 0.023695415009463087
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.5476190476190477,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.5476190476190477,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.58,
"acc_stderr": 0.049604496374885836,
"acc_norm": 0.58,
"acc_norm_stderr": 0.049604496374885836
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.8870967741935484,
"acc_stderr": 0.018003603325863614,
"acc_norm": 0.8870967741935484,
"acc_norm_stderr": 0.018003603325863614
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.03178529710642751,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.03178529710642751
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.79,
"acc_stderr": 0.040936018074033256,
"acc_norm": 0.79,
"acc_norm_stderr": 0.040936018074033256
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.8666666666666667,
"acc_stderr": 0.026544435312706467,
"acc_norm": 0.8666666666666667,
"acc_norm_stderr": 0.026544435312706467
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.9292929292929293,
"acc_stderr": 0.01826310542019948,
"acc_norm": 0.9292929292929293,
"acc_norm_stderr": 0.01826310542019948
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9844559585492227,
"acc_stderr": 0.008927492715084334,
"acc_norm": 0.9844559585492227,
"acc_norm_stderr": 0.008927492715084334
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.8025641025641026,
"acc_stderr": 0.02018264696867485,
"acc_norm": 0.8025641025641026,
"acc_norm_stderr": 0.02018264696867485
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.5037037037037037,
"acc_stderr": 0.03048470166508437,
"acc_norm": 0.5037037037037037,
"acc_norm_stderr": 0.03048470166508437
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.023005459446673936,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.023005459446673936
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.5894039735099338,
"acc_stderr": 0.04016689594849928,
"acc_norm": 0.5894039735099338,
"acc_norm_stderr": 0.04016689594849928
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.9357798165137615,
"acc_stderr": 0.010510494713201405,
"acc_norm": 0.9357798165137615,
"acc_norm_stderr": 0.010510494713201405
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.7129629629629629,
"acc_stderr": 0.03085199299325701,
"acc_norm": 0.7129629629629629,
"acc_norm_stderr": 0.03085199299325701
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.9215686274509803,
"acc_stderr": 0.018869514646658928,
"acc_norm": 0.9215686274509803,
"acc_norm_stderr": 0.018869514646658928
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.9113924050632911,
"acc_stderr": 0.018498315206865384,
"acc_norm": 0.9113924050632911,
"acc_norm_stderr": 0.018498315206865384
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.8161434977578476,
"acc_stderr": 0.025998379092356513,
"acc_norm": 0.8161434977578476,
"acc_norm_stderr": 0.025998379092356513
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8549618320610687,
"acc_stderr": 0.030884661089515375,
"acc_norm": 0.8549618320610687,
"acc_norm_stderr": 0.030884661089515375
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8925619834710744,
"acc_stderr": 0.028268812192540616,
"acc_norm": 0.8925619834710744,
"acc_norm_stderr": 0.028268812192540616
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8703703703703703,
"acc_stderr": 0.03247224389917947,
"acc_norm": 0.8703703703703703,
"acc_norm_stderr": 0.03247224389917947
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8773006134969326,
"acc_stderr": 0.025777328426978927,
"acc_norm": 0.8773006134969326,
"acc_norm_stderr": 0.025777328426978927
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.7053571428571429,
"acc_stderr": 0.0432704093257873,
"acc_norm": 0.7053571428571429,
"acc_norm_stderr": 0.0432704093257873
},
"harness|hendrycksTest-management|5": {
"acc": 0.8737864077669902,
"acc_stderr": 0.03288180278808628,
"acc_norm": 0.8737864077669902,
"acc_norm_stderr": 0.03288180278808628
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.9401709401709402,
"acc_stderr": 0.015537514263253874,
"acc_norm": 0.9401709401709402,
"acc_norm_stderr": 0.015537514263253874
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.87,
"acc_stderr": 0.03379976689896308,
"acc_norm": 0.87,
"acc_norm_stderr": 0.03379976689896308
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.9233716475095786,
"acc_stderr": 0.00951217069932386,
"acc_norm": 0.9233716475095786,
"acc_norm_stderr": 0.00951217069932386
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.8497109826589595,
"acc_stderr": 0.01923931878390472,
"acc_norm": 0.8497109826589595,
"acc_norm_stderr": 0.01923931878390472
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.823463687150838,
"acc_stderr": 0.012751770640520499,
"acc_norm": 0.823463687150838,
"acc_norm_stderr": 0.012751770640520499
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.8562091503267973,
"acc_stderr": 0.020091188936043725,
"acc_norm": 0.8562091503267973,
"acc_norm_stderr": 0.020091188936043725
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.842443729903537,
"acc_stderr": 0.020692237273583984,
"acc_norm": 0.842443729903537,
"acc_norm_stderr": 0.020692237273583984
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.8734567901234568,
"acc_stderr": 0.018498600558790906,
"acc_norm": 0.8734567901234568,
"acc_norm_stderr": 0.018498600558790906
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.6631205673758865,
"acc_stderr": 0.02819553487396673,
"acc_norm": 0.6631205673758865,
"acc_norm_stderr": 0.02819553487396673
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.6323337679269883,
"acc_stderr": 0.012314845910071703,
"acc_norm": 0.6323337679269883,
"acc_norm_stderr": 0.012314845910071703
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.8529411764705882,
"acc_stderr": 0.02151396405285963,
"acc_norm": 0.8529411764705882,
"acc_norm_stderr": 0.02151396405285963
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.8300653594771242,
"acc_stderr": 0.015194153113184724,
"acc_norm": 0.8300653594771242,
"acc_norm_stderr": 0.015194153113184724
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.7818181818181819,
"acc_stderr": 0.03955932861795833,
"acc_norm": 0.7818181818181819,
"acc_norm_stderr": 0.03955932861795833
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.8163265306122449,
"acc_stderr": 0.024789071332007646,
"acc_norm": 0.8163265306122449,
"acc_norm_stderr": 0.024789071332007646
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8855721393034826,
"acc_stderr": 0.022509345325101716,
"acc_norm": 0.8855721393034826,
"acc_norm_stderr": 0.022509345325101716
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.95,
"acc_stderr": 0.021904291355759026,
"acc_norm": 0.95,
"acc_norm_stderr": 0.021904291355759026
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699122
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8713450292397661,
"acc_stderr": 0.025679342723276894,
"acc_norm": 0.8713450292397661,
"acc_norm_stderr": 0.025679342723276894
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6499388004895961,
"mc1_stderr": 0.016697949420151022,
"mc2": 0.7508968077654237,
"mc2_stderr": 0.014534916537858438
},
"harness|winogrande|5": {
"acc": 0.8737174427782163,
"acc_stderr": 0.009335559129908452
},
"harness|gsm8k|5": {
"acc": 0.7680060652009097,
"acc_stderr": 0.011626873175092412
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
veerav96/sokoto_real | ---
license: apache-2.0
---
|
GabrielTOP/Matheus | ---
license: openrail
---
|
wangxinze/verilog_prepare | ---
license: apache-2.0
---
|
when2rl/OpenHermesPreferences_reformatted | ---
dataset_info:
features:
- name: prompt
dtype: string
- name: prompt_id
dtype: string
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
- name: messages
list:
- name: content
dtype: string
- name: role
dtype: string
- name: score_chosen
dtype: float64
- name: score_rejected
dtype: float64
- name: other_info
struct:
- name: candidate_policies
sequence: string
- name: candidates_completions
sequence: string
- name: category
dtype: string
- name: chosen_policy
dtype: string
- name: chosen_rank
dtype: int64
- name: rank_str
dtype: string
- name: ranks
sequence: int64
- name: rejected_policy
dtype: string
- name: rejected_rank
dtype: int64
- name: source
dtype: string
splits:
- name: train
num_bytes: 9078591768
num_examples: 989490
download_size: 4430641969
dataset_size: 9078591768
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for OpenHermesPreferences_reformatted
<!-- Provide a quick summary of the dataset. -->
This is a reformatted version of Argilla's OpenHermesPreferences:
1. reformatted the dataset to be consistent with ultrafeedback_binarized.
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
juancopi81/orca-math-word-problems-100020_110022 | ---
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 10803481
num_examples: 10002
download_size: 3904854
dataset_size: 10803481
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
dipanjanS/imdb_sentiment_finetune_dataset | ---
dataset_info:
features:
- name: review
dtype: string
- name: sentiment
dtype: int64
splits:
- name: train
num_bytes: 2588807
num_examples: 2000
- name: validation
num_bytes: 2667965
num_examples: 2000
- name: test
num_bytes: 21177655
num_examples: 16000
download_size: 17194624
dataset_size: 26434427
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
adityarra07/ATC_test_2_noise | ---
dataset_info:
features:
- name: audio
struct:
- name: array
sequence: float64
- name: path
dtype: 'null'
- name: sampling_rate
dtype: int64
- name: transcription
dtype: string
- name: id
dtype: string
splits:
- name: train
num_bytes: 454815537
num_examples: 871
download_size: 455105235
dataset_size: 454815537
---
# Dataset Card for "ATC_test_2_noise"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
CyberHarem/tsurumaki_kokoro_bangdream | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of tsurumaki_kokoro (BanG Dream!)
This is the dataset of tsurumaki_kokoro (BanG Dream!), containing 500 images and their tags.
The core tags of this character are `blonde_hair, bangs, long_hair, yellow_eyes, sidelocks, breasts`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:----------------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 500 | 748.33 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsurumaki_kokoro_bangdream/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 500 | 413.65 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsurumaki_kokoro_bangdream/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 1275 | 906.55 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsurumaki_kokoro_bangdream/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 500 | 652.83 MiB | [Download](https://huggingface.co/datasets/CyberHarem/tsurumaki_kokoro_bangdream/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 1275 | 1.30 GiB | [Download](https://huggingface.co/datasets/CyberHarem/tsurumaki_kokoro_bangdream/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# Download the raw tagged-image archive from the Hugging Face Hub.
# hf_hub_download caches the file locally and returns its path.
zip_file = hf_hub_download(
    repo_id='CyberHarem/tsurumaki_kokoro_bangdream',
    repo_type='dataset',
    filename='dataset-raw.zip',
)
# Extract the archive into a working directory (created if absent).
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
    zf.extractall(dataset_dir)
# Iterate over the extracted dataset with waifuc's LocalSource;
# each item carries the image plus its metadata (filename, tags).
source = LocalSource(dataset_dir)
for item in source:
    print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering results; some outfits may be mined from the clusters below.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 8 |  |  |  |  |  | looking_at_viewer, short_sleeves, wrist_cuffs, 1girl, earrings, hair_bow, midriff, smile, solo, blush, thighhighs, crop_top, navel, necklace, blue_skirt, multicolored_skirt, open_mouth, belt, boots, choker, frilled_skirt, layered_skirt, multicolored_shirt, polka_dot, see-through_sleeves, shoes, socks, white_background, white_footwear |
| 1 | 14 |  |  |  |  |  | 1girl, earrings, solo, black_headwear, hat_bow, looking_at_viewer, pom_pom_(clothes), red_bowtie, top_hat, cleavage, medium_breasts, polka_dot_bow, open_mouth, blush, frills, smiley_face, black_shorts, :d, confetti, holding, short_shorts, teeth, white_background, back_bow |
| 2 | 10 |  |  |  |  |  | frills, looking_at_viewer, 1girl, blush, solo, confetti, earrings, white_gloves, hair_bow, open_mouth, star_(symbol), :d, red_bowtie, upper_body, ribbon, string_of_flags, twintails, balloon, blue_bow, corset, short_sleeves, striped_bowtie, top_hat |
| 3 | 22 |  |  |  |  |  | 1girl, solo, looking_at_viewer, blush, bowtie, open_mouth, earrings, wrist_cuffs, frills, choker, fur-trimmed_capelet, striped_bow, mini_crown, navel, ribbon, blue_bow, red_capelet, :d, sparkle, midriff, white_background, one_eye_closed, short_sleeves, upper_body |
| 4 | 6 |  |  |  |  |  | 1girl, epaulettes, looking_at_viewer, open_mouth, shako_cap, sleeveless, solo, upper_body, :d, band_uniform, blush, sash, wrist_cuffs, upper_teeth_only |
| 5 | 13 |  |  |  |  |  | 1girl, :d, looking_at_viewer, open_mouth, solo, white_skirt, band_uniform, blush, epaulettes, wrist_cuffs, shako_cap, sleeveless_shirt, teeth, thighhighs, medium_breasts, sash, frilled_skirt, red_footwear, armpits, standing, thigh_boots, white_background, cowboy_shot, simple_background, star_(symbol) |
| 6 | 32 |  |  |  |  |  | 1girl, solo, looking_at_viewer, short_sleeves, blush, red_shirt, striped_shirt, smile, collarbone, open_mouth, simple_background, white_background, overall_shorts, upper_body, medium_breasts, teeth |
| 7 | 6 |  |  |  |  |  | 1girl, :d, blush, long_sleeves, looking_at_viewer, open_mouth, sheep_horns, sleep_mask, solo, star_(symbol), upper_teeth_only, bow, hair_flower, mask_on_head, headset, sparkle, apron, arms_up, center_frills, frilled_sleeves, pink_rose, ribbon, striped, upper_body |
| 8 | 24 |  |  |  |  |  | 1girl, solo, white_dress, looking_at_viewer, sleeveless_dress, sun_hat, sundress, blush, day, outdoors, straw_hat, sunflower, open_mouth, collarbone, :d, frills, hat_flower, blue_sky, cloud, medium_breasts, cleavage, upper_body, upper_teeth_only |
| 9 | 10 |  |  |  |  |  | 1girl, brown_dress, hanasakigawa_school_uniform, long_sleeves, looking_at_viewer, neck_ribbon, red_ribbon, sailor_dress, solo, double-breasted, white_sailor_collar, blush, open_mouth, white_background, simple_background, pleated_dress, one_eye_closed, teeth, :d, ;d, bow |
| 10 | 10 |  |  |  |  |  | 1girl, looking_at_viewer, pleated_skirt, serafuku, short_sleeves, solo, white_skirt, blush, hanasakigawa_school_uniform, white_sailor_collar, white_background, blue_neckerchief, blue_shirt, open_mouth, simple_background, miniskirt, collarbone, :d |
| 11 | 5 |  |  |  |  |  | 1girl, earrings, smile, solo, star_(symbol), looking_at_viewer, beret, blush, bowtie, frilled_shirt_collar, hat_bow, short_sleeves, upper_body, alternate_hairstyle, argyle, center_frills, christmas, cleavage, constellation_print, dress, frilled_sleeves, hairclip, headset, long_sleeves, red_headwear, ribbon, striped_bow, twintails |
| 12 | 5 |  |  |  |  |  | 1girl, blush, hair_flower, hair_ribbon, solo, sunflower, twintails, :d, bracelet, looking_at_viewer, open_mouth, alternate_hairstyle, blue_dress, blue_ribbon, frills, holding, necklace, beachball, bow, day, hairband, polka_dot, sky, sparkle, upper_body, white_background |
| 13 | 8 |  |  |  |  |  | 1girl, looking_at_viewer, skirt, solo, twintails, white_gloves, handcuffs, midriff, navel, police_hat, star_(symbol), blue_headwear, blush, short_sleeves, crop_top, hair_ribbon, open_mouth, peaked_cap, :d, cropped_jacket, detached_collar, frills, orange_necktie, short_necktie, belt, cleavage, knee_boots, open_clothes, shirt, striped, white_background, white_jacket, white_thighhighs |
| 14 | 14 |  |  |  |  |  | 1girl, kimono, obi, blush, looking_at_viewer, solo, wide_sleeves, floral_print, hair_flower, long_sleeves, ponytail, open_mouth, hair_bow, :d, holding, detached_sleeves, frills, red_flower, red_bow, sky, teeth, upper_body |
| 15 | 16 |  |  |  |  |  | blush, looking_at_viewer, 1girl, solo, nipples, open_mouth, completely_nude, pussy, collarbone, navel, simple_background, barefoot, large_breasts, medium_breasts, stomach, white_background, :d, fingernails, full_body, anus, armpits, blunt_bangs, cleft_of_venus, heart, toes, uncensored |
| 16 | 5 |  |  |  |  |  | 1boy, 1girl, blush, hetero, medium_breasts, open_mouth, solo_focus, sweat, completely_nude, saliva, sex_from_behind, heart, heavy_breathing, nipples, standing_sex, tears, upper_teeth_only, vaginal, :d, arm_support, ass, bent_over, cum, from_side, indoors, large_breasts, looking_at_viewer, looking_to_the_side, mosaic_censoring, motion_lines, penis, torso_grab, trembling |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | looking_at_viewer | short_sleeves | wrist_cuffs | 1girl | earrings | hair_bow | midriff | smile | solo | blush | thighhighs | crop_top | navel | necklace | blue_skirt | multicolored_skirt | open_mouth | belt | boots | choker | frilled_skirt | layered_skirt | multicolored_shirt | polka_dot | see-through_sleeves | shoes | socks | white_background | white_footwear | black_headwear | hat_bow | pom_pom_(clothes) | red_bowtie | top_hat | cleavage | medium_breasts | polka_dot_bow | frills | smiley_face | black_shorts | :d | confetti | holding | short_shorts | teeth | back_bow | white_gloves | star_(symbol) | upper_body | ribbon | string_of_flags | twintails | balloon | blue_bow | corset | striped_bowtie | bowtie | fur-trimmed_capelet | striped_bow | mini_crown | red_capelet | sparkle | one_eye_closed | epaulettes | shako_cap | sleeveless | band_uniform | sash | upper_teeth_only | white_skirt | sleeveless_shirt | red_footwear | armpits | standing | thigh_boots | cowboy_shot | simple_background | red_shirt | striped_shirt | collarbone | overall_shorts | long_sleeves | sheep_horns | sleep_mask | bow | hair_flower | mask_on_head | headset | apron | arms_up | center_frills | frilled_sleeves | pink_rose | striped | white_dress | sleeveless_dress | sun_hat | sundress | day | outdoors | straw_hat | sunflower | hat_flower | blue_sky | cloud | brown_dress | hanasakigawa_school_uniform | neck_ribbon | red_ribbon | sailor_dress | double-breasted | white_sailor_collar | pleated_dress | ;d | pleated_skirt | serafuku | blue_neckerchief | blue_shirt | miniskirt | beret | frilled_shirt_collar | alternate_hairstyle | argyle | christmas | constellation_print | dress | hairclip | red_headwear | hair_ribbon | bracelet | blue_dress | blue_ribbon | beachball | hairband | sky | skirt | handcuffs | police_hat | blue_headwear | peaked_cap | cropped_jacket | detached_collar | orange_necktie | short_necktie | knee_boots | open_clothes | shirt | 
white_jacket | white_thighhighs | kimono | obi | wide_sleeves | floral_print | ponytail | detached_sleeves | red_flower | red_bow | nipples | completely_nude | pussy | barefoot | large_breasts | stomach | fingernails | full_body | anus | blunt_bangs | cleft_of_venus | heart | toes | uncensored | 1boy | hetero | solo_focus | sweat | saliva | sex_from_behind | heavy_breathing | standing_sex | tears | vaginal | arm_support | ass | bent_over | cum | from_side | indoors | looking_to_the_side | mosaic_censoring | motion_lines | penis | torso_grab | trembling |
|----:|----------:|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:----------------------------------|:--------------------|:----------------|:--------------|:--------|:-----------|:-----------|:----------|:--------|:-------|:--------|:-------------|:-----------|:--------|:-----------|:-------------|:---------------------|:-------------|:-------|:--------|:---------|:----------------|:----------------|:---------------------|:------------|:----------------------|:--------|:--------|:-------------------|:-----------------|:-----------------|:----------|:--------------------|:-------------|:----------|:-----------|:-----------------|:----------------|:---------|:--------------|:---------------|:-----|:-----------|:----------|:---------------|:--------|:-----------|:---------------|:----------------|:-------------|:---------|:------------------|:------------|:----------|:-----------|:---------|:-----------------|:---------|:----------------------|:--------------|:-------------|:--------------|:----------|:-----------------|:-------------|:------------|:-------------|:---------------|:-------|:-------------------|:--------------|:-------------------|:---------------|:----------|:-----------|:--------------|:--------------|:--------------------|:------------|:----------------|:-------------|:-----------------|:---------------|:--------------|:-------------|:------|:--------------|:---------------|:----------|:--------|:----------|:----------------|:------------------|:------------|:----------|:--------------|:-------------------|:----------|:-----------|:------|:-----------|:------------|:------------|:-------------|:-----------|:--------|:--------------|:------------------------------|:--------------|:-------------|:---------------|:------------------|:----------------------|:----------------|:-----|:----------------|:-----------|:-------------------|:-------------|:---------
---|:--------|:-----------------------|:----------------------|:---------|:------------|:----------------------|:--------|:-----------|:---------------|:--------------|:-----------|:-------------|:--------------|:------------|:-----------|:------|:--------|:------------|:-------------|:----------------|:-------------|:-----------------|:------------------|:-----------------|:----------------|:-------------|:---------------|:--------|:---------------|:-------------------|:---------|:------|:---------------|:---------------|:-----------|:-------------------|:-------------|:----------|:----------|:------------------|:--------|:-----------|:----------------|:----------|:--------------|:------------|:-------|:--------------|:-----------------|:--------|:-------|:-------------|:-------|:---------|:-------------|:--------|:---------|:------------------|:------------------|:---------------|:--------|:----------|:--------------|:------|:------------|:------|:------------|:----------|:----------------------|:-------------------|:---------------|:--------|:-------------|:------------|
| 0 | 8 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 1 | 14 |  |  |  |  |  | X | | | X | X | | | | X | X | | | | | | | X | | | | | | | | | | | X | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 2 | 10 |  |  |  |  |  | X | X | | X | X | X | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | X | X | | | | X | | | X | X | | | | | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 3 | 22 |  |  |  |  |  | X | X | X | X | X | | X | | X | X | | | X | | | | X | | | X | | | | | | | | X | | | | | | | | | | X | | | X | | | | | | | | X | X | | | | X | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 4 | 6 |  |  |  |  |  | X | | X | X | | | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | X | | | | | | | | | | | | | | | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 5 | 13 |  |  |  |  |  | X | | X | X | | | | | X | X | X | | | | | | X | | | | X | | | | | | | X | | | | | | | | X | | | | | X | | | | X | | | X | | | | | | | | | | | | | | | | X | X | | X | X | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 6 | 32 |  |  |  |  |  | X | X | | X | | | | X | X | X | | | | | | | X | | | | | | | | | | | X | | | | | | | | X | | | | | | | | | X | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 7 | 6 |  |  |  |  |  | X | | | X | | | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | | | | | | | | | | | | X | | | | | | | X | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 8 | 24 |  |  |  |  |  | X | | | X | | | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | | | X | X | | X | | | X | | | | | | | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | X | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 9 | 10 |  |  |  |  |  | X | | | X | | | | | X | X | | | | | | | X | | | | | | | | | | | X | | | | | | | | | | | | | X | | | | X | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | X | | | | | X | | | X | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 10 | 10 |  |  |  |  |  | X | X | | X | | | | | X | X | | | | | | | X | | | | | | | | | | | X | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | X | | | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 11 | 5 |  |  |  |  |  | X | X | | X | X | | | X | X | X | | | | | | | | | | | | | | | | | | | | | X | | | | X | | | | | | | | | | | | | X | X | X | | X | | | | | X | | X | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | X | | | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 12 | 5 |  |  |  |  |  | X | | | X | | | | | X | X | | | | X | | | X | | | | | | | X | | | | X | | | | | | | | | | X | | | X | | X | | | | | | X | | | X | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | X | X | | | | | | | | | | | | | X | | | X | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 13 | 8 |  |  |  |  |  | X | X | | X | | | X | | X | X | | X | X | | | | X | X | | | | | | | | | | X | | | | | | | X | | | X | | | X | | | | | | X | X | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 14 | 14 |  |  |  |  |  | X | | | X | | X | | | X | X | | | | | | | X | | | | | | | | | | | | | | | | | | | | | X | | | X | | X | | X | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | |
| 15 | 16 |  |  |  |  |  | X | | | X | | | | | X | X | | | X | | | | X | | | | | | | | | | | X | | | | | | | | X | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | X | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | | | |
| 16 | 5 |  |  |  |  |  | X | | | X | | | | | | X | | | | | | | X | | | | | | | | | | | | | | | | | | | X | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | | X | X | | | X | | | | | | | X | | | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
|
shidowake/Doctor-Shotgun_capybara-sharegpt_subset_split_5 | ---
dataset_info:
features:
- name: source
dtype: string
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
splits:
- name: train
num_bytes: 9064100.571348244
num_examples: 2001
download_size: 4725927
dataset_size: 9064100.571348244
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
rhaymison/questions_answers_geo_nord | ---
language:
- pt
license: apache-2.0
size_categories:
- 10K<n<100K
task_categories:
- text-generation
pretty_name: geografia Nordestina
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 11397122.0
num_examples: 44004
download_size: 5647644
dataset_size: 11397122.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
tags:
- portugues
- nordeste
- mixtral
---
|
results-sd-v1-5-sd-v2-1-if-v1-0-karlo/d718bf95 | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 188
num_examples: 10
download_size: 1335
dataset_size: 188
---
# Dataset Card for "d718bf95"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.