id stringlengths 2 115 | lastModified stringlengths 24 24 | tags list | author stringlengths 2 42 ⌀ | description stringlengths 0 68.7k ⌀ | citation stringlengths 0 10.7k ⌀ | cardData null | likes int64 0 3.55k | downloads int64 0 10.1M | card stringlengths 0 1.01M |
|---|---|---|---|---|---|---|---|---|---|
fundrais123/github-issues | 2023-09-09T14:48:21.000Z | [
"region:us"
] | fundrais123 | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: url
dtype: string
- name: repository_url
dtype: string
- name: labels_url
dtype: string
- name: comments_url
dtype: string
- name: events_url
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: user
struct:
- name: avatar_url
dtype: string
- name: events_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: gravatar_id
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: login
dtype: string
- name: node_id
dtype: string
- name: organizations_url
dtype: string
- name: received_events_url
dtype: string
- name: repos_url
dtype: string
- name: site_admin
dtype: bool
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: type
dtype: string
- name: url
dtype: string
- name: labels
list:
- name: color
dtype: string
- name: default
dtype: bool
- name: description
dtype: string
- name: id
dtype: int64
- name: name
dtype: string
- name: node_id
dtype: string
- name: url
dtype: string
- name: state
dtype: string
- name: locked
dtype: bool
- name: assignee
struct:
- name: avatar_url
dtype: string
- name: events_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: gravatar_id
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: login
dtype: string
- name: node_id
dtype: string
- name: organizations_url
dtype: string
- name: received_events_url
dtype: string
- name: repos_url
dtype: string
- name: site_admin
dtype: bool
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: type
dtype: string
- name: url
dtype: string
- name: assignees
list:
- name: avatar_url
dtype: string
- name: events_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: gravatar_id
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: login
dtype: string
- name: node_id
dtype: string
- name: organizations_url
dtype: string
- name: received_events_url
dtype: string
- name: repos_url
dtype: string
- name: site_admin
dtype: bool
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: type
dtype: string
- name: url
dtype: string
- name: milestone
struct:
- name: closed_at
dtype: string
- name: closed_issues
dtype: int64
- name: created_at
dtype: string
- name: creator
struct:
- name: avatar_url
dtype: string
- name: events_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: gravatar_id
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: login
dtype: string
- name: node_id
dtype: string
- name: organizations_url
dtype: string
- name: received_events_url
dtype: string
- name: repos_url
dtype: string
- name: site_admin
dtype: bool
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: type
dtype: string
- name: url
dtype: string
- name: description
dtype: string
- name: due_on
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: labels_url
dtype: string
- name: node_id
dtype: string
- name: number
dtype: int64
- name: open_issues
dtype: int64
- name: state
dtype: string
- name: title
dtype: string
- name: updated_at
dtype: string
- name: url
dtype: string
- name: comments
sequence: string
- name: created_at
dtype: string
- name: updated_at
dtype: string
- name: closed_at
dtype: string
- name: author_association
dtype: string
- name: active_lock_reason
dtype: 'null'
- name: draft
dtype: bool
- name: pull_request
struct:
- name: diff_url
dtype: string
- name: html_url
dtype: string
- name: merged_at
dtype: string
- name: patch_url
dtype: string
- name: url
dtype: string
- name: body
dtype: string
- name: reactions
struct:
- name: '+1'
dtype: int64
- name: '-1'
dtype: int64
- name: confused
dtype: int64
- name: eyes
dtype: int64
- name: heart
dtype: int64
- name: hooray
dtype: int64
- name: laugh
dtype: int64
- name: rocket
dtype: int64
- name: total_count
dtype: int64
- name: url
dtype: string
- name: timeline_url
dtype: string
- name: performed_via_github_app
dtype: 'null'
- name: state_reason
dtype: string
- name: is_pull_request
dtype: bool
splits:
- name: train
num_bytes: 24117020
num_examples: 4000
download_size: 6802855
dataset_size: 24117020
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "github-issues"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Roscall/elvis60s | 2023-09-09T14:50:44.000Z | [
"region:us"
] | Roscall | null | null | null | 0 | 0 | Entry not found |
benmshultz/RuFal_fallacy_detection | 2023-09-09T14:54:02.000Z | [
"region:us"
] | benmshultz | null | null | null | 0 | 0 | This dataset contains 700 annotated Russiang government tweets, labelled with 13 logical fallacy types, as proposed in the novel Logical Fallacy Detection task (Jin et al.). It is split into 595 train samples and 105 test samples. |
LxYxvv/ChinaDaily_EN_ZH | 2023-09-09T15:04:52.000Z | [
"license:mit",
"region:us"
] | LxYxvv | null | null | null | 0 | 0 | ---
license: mit
---
|
dafuqCSS/Flowyx | 2023-09-09T20:01:01.000Z | [
"region:us"
] | dafuqCSS | null | null | null | 0 | 0 | Entry not found |
kamyarazimi/ConcreteCrackDataset2 | 2023-09-09T15:30:26.000Z | [
"region:us"
] | kamyarazimi | null | null | null | 0 | 0 | Entry not found |
vphu123/llm_data | 2023-09-09T15:45:15.000Z | [
"region:us"
] | vphu123 | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 45546
num_examples: 26
download_size: 21872
dataset_size: 45546
---
# Dataset Card for "llm_data"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
indra-inc/rvl_cdip_train600_valid100_ground_truth | 2023-09-09T15:37:53.000Z | [
"region:us"
] | indra-inc | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: valid
path: data/valid-*
dataset_info:
features:
- name: Image_id
dtype:
class_label:
names:
'0': advertisement
'1': budget
'2': email
'3': file_folder
'4': form
'5': handwritten
'6': invoice
'7': letter
'8': memo
'9': news_article
'10': presentation
'11': questionnaire
'12': resume
'13': scientific_publication
'14': scientific_report
'15': specification
- name: Image_raw
dtype: image
- name: ground_truth
dtype: string
splits:
- name: train
num_bytes: 1313222936.0
num_examples: 9600
- name: valid
num_bytes: 180924349.4
num_examples: 1600
download_size: 1281715268
dataset_size: 1494147285.4
---
# Dataset Card for "rvl_cdip_train600_valid100_doc_classification"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_cyberagent__open-calm-large | 2023-09-09T15:43:22.000Z | [
"region:us"
] | open-llm-leaderboard | null | null | null | 0 | 0 | ---
pretty_name: Evaluation run of cyberagent/open-calm-large
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [cyberagent/open-calm-large](https://huggingface.co/cyberagent/open-calm-large)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 61 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cyberagent__open-calm-large\"\
,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\
\nThese are the [latest results from run 2023-09-09T15:42:03.677218](https://huggingface.co/datasets/open-llm-leaderboard/details_cyberagent__open-calm-large/blob/main/results_2023-09-09T15-42-03.677218.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.251445177684535,\n\
\ \"acc_stderr\": 0.03132207506482574,\n \"acc_norm\": 0.25228092536806246,\n\
\ \"acc_norm_stderr\": 0.031336343647221425,\n \"mc1\": 0.2521419828641371,\n\
\ \"mc1_stderr\": 0.015201522246299962,\n \"mc2\": 0.4652388251878066,\n\
\ \"mc2_stderr\": 0.015621160279545679\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.17406143344709898,\n \"acc_stderr\": 0.011080177129482205,\n\
\ \"acc_norm\": 0.20733788395904437,\n \"acc_norm_stderr\": 0.011846905782971383\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2795259908384784,\n\
\ \"acc_stderr\": 0.004478491697891248,\n \"acc_norm\": 0.29555865365465045,\n\
\ \"acc_norm_stderr\": 0.004553609405747228\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \
\ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n\
\ \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n\
\ \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.18421052631578946,\n \"acc_stderr\": 0.0315469804508223,\n\
\ \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.24,\n\
\ \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \
\ \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.25660377358490566,\n \"acc_stderr\": 0.02688064788905198,\n\
\ \"acc_norm\": 0.25660377358490566,\n \"acc_norm_stderr\": 0.02688064788905198\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n\
\ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n\
\ \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n\
\ \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2658959537572254,\n\
\ \"acc_stderr\": 0.03368762932259433,\n \"acc_norm\": 0.2658959537572254,\n\
\ \"acc_norm_stderr\": 0.03368762932259433\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.10784313725490197,\n \"acc_stderr\": 0.03086428212206014,\n\
\ \"acc_norm\": 0.10784313725490197,\n \"acc_norm_stderr\": 0.03086428212206014\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n\
\ \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.2,\n \"acc_stderr\": 0.0261488180184245,\n \
\ \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.0261488180184245\n },\n\
\ \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\
\ \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n\
\ \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.20689655172413793,\n \"acc_stderr\": 0.03375672449560553,\n\
\ \"acc_norm\": 0.20689655172413793,\n \"acc_norm_stderr\": 0.03375672449560553\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.23809523809523808,\n \"acc_stderr\": 0.02193587808118476,\n \"\
acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.02193587808118476\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n\
\ \"acc_stderr\": 0.03852273364924316,\n \"acc_norm\": 0.24603174603174602,\n\
\ \"acc_norm_stderr\": 0.03852273364924316\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.22258064516129034,\n \"acc_stderr\": 0.023664216671642528,\n \"\
acc_norm\": 0.22258064516129034,\n \"acc_norm_stderr\": 0.023664216671642528\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n \"\
acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\"\
: 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.19393939393939394,\n \"acc_stderr\": 0.030874145136562094,\n\
\ \"acc_norm\": 0.19393939393939394,\n \"acc_norm_stderr\": 0.030874145136562094\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.22727272727272727,\n \"acc_stderr\": 0.02985751567338639,\n \"\
acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.02985751567338639\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.2849740932642487,\n \"acc_stderr\": 0.032577140777096586,\n\
\ \"acc_norm\": 0.2849740932642487,\n \"acc_norm_stderr\": 0.032577140777096586\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.258974358974359,\n \"acc_stderr\": 0.02221110681006166,\n \
\ \"acc_norm\": 0.258974358974359,\n \"acc_norm_stderr\": 0.02221110681006166\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.22962962962962963,\n \"acc_stderr\": 0.02564410863926762,\n \
\ \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.02564410863926762\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.23109243697478993,\n \"acc_stderr\": 0.027381406927868956,\n\
\ \"acc_norm\": 0.23109243697478993,\n \"acc_norm_stderr\": 0.027381406927868956\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526733,\n \"\
acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526733\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.26605504587155965,\n \"acc_stderr\": 0.01894602232222559,\n \"\
acc_norm\": 0.26605504587155965,\n \"acc_norm_stderr\": 0.01894602232222559\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321617,\n \"\
acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321617\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.24019607843137256,\n \"acc_stderr\": 0.02998373305591361,\n \"\
acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.02998373305591361\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.26582278481012656,\n \"acc_stderr\": 0.028756799629658342,\n \
\ \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.028756799629658342\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.19282511210762332,\n\
\ \"acc_stderr\": 0.02647824096048936,\n \"acc_norm\": 0.19282511210762332,\n\
\ \"acc_norm_stderr\": 0.02647824096048936\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306085,\n\
\ \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306085\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.3884297520661157,\n \"acc_stderr\": 0.04449270350068382,\n \"\
acc_norm\": 0.3884297520661157,\n \"acc_norm_stderr\": 0.04449270350068382\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n\
\ \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.21296296296296297,\n\
\ \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.034878251684978906,\n\
\ \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.034878251684978906\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.22321428571428573,\n\
\ \"acc_stderr\": 0.039523019677025116,\n \"acc_norm\": 0.22321428571428573,\n\
\ \"acc_norm_stderr\": 0.039523019677025116\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.042450224863844935,\n\
\ \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.042450224863844935\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2094017094017094,\n\
\ \"acc_stderr\": 0.026655699653922768,\n \"acc_norm\": 0.2094017094017094,\n\
\ \"acc_norm_stderr\": 0.026655699653922768\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.13,\n \"acc_stderr\": 0.033799766898963086,\n \
\ \"acc_norm\": 0.13,\n \"acc_norm_stderr\": 0.033799766898963086\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.26947637292464877,\n\
\ \"acc_stderr\": 0.015866243073215044,\n \"acc_norm\": 0.26947637292464877,\n\
\ \"acc_norm_stderr\": 0.015866243073215044\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.27167630057803466,\n \"acc_stderr\": 0.02394851290546835,\n\
\ \"acc_norm\": 0.27167630057803466,\n \"acc_norm_stderr\": 0.02394851290546835\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25251396648044694,\n\
\ \"acc_stderr\": 0.014530330201468655,\n \"acc_norm\": 0.25251396648044694,\n\
\ \"acc_norm_stderr\": 0.014530330201468655\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.02440439492808787,\n\
\ \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.02440439492808787\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.26366559485530544,\n\
\ \"acc_stderr\": 0.02502553850053234,\n \"acc_norm\": 0.26366559485530544,\n\
\ \"acc_norm_stderr\": 0.02502553850053234\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.28703703703703703,\n \"acc_stderr\": 0.025171041915309684,\n\
\ \"acc_norm\": 0.28703703703703703,\n \"acc_norm_stderr\": 0.025171041915309684\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.26595744680851063,\n \"acc_stderr\": 0.02635806569888059,\n \
\ \"acc_norm\": 0.26595744680851063,\n \"acc_norm_stderr\": 0.02635806569888059\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.22359843546284225,\n\
\ \"acc_stderr\": 0.010641589542841378,\n \"acc_norm\": 0.22359843546284225,\n\
\ \"acc_norm_stderr\": 0.010641589542841378\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.34558823529411764,\n \"acc_stderr\": 0.028888193103988644,\n\
\ \"acc_norm\": 0.34558823529411764,\n \"acc_norm_stderr\": 0.028888193103988644\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.2549019607843137,\n \"acc_stderr\": 0.01763082737514838,\n \
\ \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.01763082737514838\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.17272727272727273,\n\
\ \"acc_stderr\": 0.03620691833929218,\n \"acc_norm\": 0.17272727272727273,\n\
\ \"acc_norm_stderr\": 0.03620691833929218\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.22448979591836735,\n \"acc_stderr\": 0.026711430555538408,\n\
\ \"acc_norm\": 0.22448979591836735,\n \"acc_norm_stderr\": 0.026711430555538408\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23383084577114427,\n\
\ \"acc_stderr\": 0.029929415408348387,\n \"acc_norm\": 0.23383084577114427,\n\
\ \"acc_norm_stderr\": 0.029929415408348387\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \
\ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3253012048192771,\n\
\ \"acc_stderr\": 0.03647168523683226,\n \"acc_norm\": 0.3253012048192771,\n\
\ \"acc_norm_stderr\": 0.03647168523683226\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.21637426900584794,\n \"acc_stderr\": 0.03158149539338734,\n\
\ \"acc_norm\": 0.21637426900584794,\n \"acc_norm_stderr\": 0.03158149539338734\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2521419828641371,\n\
\ \"mc1_stderr\": 0.015201522246299962,\n \"mc2\": 0.4652388251878066,\n\
\ \"mc2_stderr\": 0.015621160279545679\n }\n}\n```"
repo_url: https://huggingface.co/cyberagent/open-calm-large
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|arc:challenge|25_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hellaswag|10_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-42-03.677218.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-42-03.677218.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- '**/details_harness|truthfulqa:mc|0_2023-09-09T15-42-03.677218.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-09-09T15-42-03.677218.parquet'
- config_name: results
data_files:
- split: 2023_09_09T15_42_03.677218
path:
- results_2023-09-09T15-42-03.677218.parquet
- split: latest
path:
- results_2023-09-09T15-42-03.677218.parquet
---
# Dataset Card for Evaluation run of cyberagent/open-calm-large
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/cyberagent/open-calm-large
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [cyberagent/open-calm-large](https://huggingface.co/cyberagent/open-calm-large) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_cyberagent__open-calm-large",
"harness_truthfulqa_mc_0",
split="train")
```
## Latest results
These are the [latest results from run 2023-09-09T15:42:03.677218](https://huggingface.co/datasets/open-llm-leaderboard/details_cyberagent__open-calm-large/blob/main/results_2023-09-09T15-42-03.677218.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.251445177684535,
"acc_stderr": 0.03132207506482574,
"acc_norm": 0.25228092536806246,
"acc_norm_stderr": 0.031336343647221425,
"mc1": 0.2521419828641371,
"mc1_stderr": 0.015201522246299962,
"mc2": 0.4652388251878066,
"mc2_stderr": 0.015621160279545679
},
"harness|arc:challenge|25": {
"acc": 0.17406143344709898,
"acc_stderr": 0.011080177129482205,
"acc_norm": 0.20733788395904437,
"acc_norm_stderr": 0.011846905782971383
},
"harness|hellaswag|10": {
"acc": 0.2795259908384784,
"acc_stderr": 0.004478491697891248,
"acc_norm": 0.29555865365465045,
"acc_norm_stderr": 0.004553609405747228
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.26,
"acc_stderr": 0.04408440022768081,
"acc_norm": 0.26,
"acc_norm_stderr": 0.04408440022768081
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.23703703703703705,
"acc_stderr": 0.03673731683969506,
"acc_norm": 0.23703703703703705,
"acc_norm_stderr": 0.03673731683969506
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.18421052631578946,
"acc_stderr": 0.0315469804508223,
"acc_norm": 0.18421052631578946,
"acc_norm_stderr": 0.0315469804508223
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909283,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909283
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.25660377358490566,
"acc_stderr": 0.02688064788905198,
"acc_norm": 0.25660377358490566,
"acc_norm_stderr": 0.02688064788905198
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.2222222222222222,
"acc_stderr": 0.03476590104304134,
"acc_norm": 0.2222222222222222,
"acc_norm_stderr": 0.03476590104304134
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.32,
"acc_stderr": 0.046882617226215034,
"acc_norm": 0.32,
"acc_norm_stderr": 0.046882617226215034
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.04725815626252604,
"acc_norm": 0.33,
"acc_norm_stderr": 0.04725815626252604
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.2658959537572254,
"acc_stderr": 0.03368762932259433,
"acc_norm": 0.2658959537572254,
"acc_norm_stderr": 0.03368762932259433
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.10784313725490197,
"acc_stderr": 0.03086428212206014,
"acc_norm": 0.10784313725490197,
"acc_norm_stderr": 0.03086428212206014
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.22,
"acc_stderr": 0.04163331998932268,
"acc_norm": 0.22,
"acc_norm_stderr": 0.04163331998932268
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.2,
"acc_stderr": 0.0261488180184245,
"acc_norm": 0.2,
"acc_norm_stderr": 0.0261488180184245
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2631578947368421,
"acc_stderr": 0.041424397194893624,
"acc_norm": 0.2631578947368421,
"acc_norm_stderr": 0.041424397194893624
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.20689655172413793,
"acc_stderr": 0.03375672449560553,
"acc_norm": 0.20689655172413793,
"acc_norm_stderr": 0.03375672449560553
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.23809523809523808,
"acc_stderr": 0.02193587808118476,
"acc_norm": 0.23809523809523808,
"acc_norm_stderr": 0.02193587808118476
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.24603174603174602,
"acc_stderr": 0.03852273364924316,
"acc_norm": 0.24603174603174602,
"acc_norm_stderr": 0.03852273364924316
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.22258064516129034,
"acc_stderr": 0.023664216671642528,
"acc_norm": 0.22258064516129034,
"acc_norm_stderr": 0.023664216671642528
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.2857142857142857,
"acc_stderr": 0.031785297106427496,
"acc_norm": 0.2857142857142857,
"acc_norm_stderr": 0.031785297106427496
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.19393939393939394,
"acc_stderr": 0.030874145136562094,
"acc_norm": 0.19393939393939394,
"acc_norm_stderr": 0.030874145136562094
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.22727272727272727,
"acc_stderr": 0.02985751567338639,
"acc_norm": 0.22727272727272727,
"acc_norm_stderr": 0.02985751567338639
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.2849740932642487,
"acc_stderr": 0.032577140777096586,
"acc_norm": 0.2849740932642487,
"acc_norm_stderr": 0.032577140777096586
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.258974358974359,
"acc_stderr": 0.02221110681006166,
"acc_norm": 0.258974358974359,
"acc_norm_stderr": 0.02221110681006166
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.22962962962962963,
"acc_stderr": 0.02564410863926762,
"acc_norm": 0.22962962962962963,
"acc_norm_stderr": 0.02564410863926762
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.23109243697478993,
"acc_stderr": 0.027381406927868956,
"acc_norm": 0.23109243697478993,
"acc_norm_stderr": 0.027381406927868956
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.31125827814569534,
"acc_stderr": 0.03780445850526733,
"acc_norm": 0.31125827814569534,
"acc_norm_stderr": 0.03780445850526733
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.26605504587155965,
"acc_stderr": 0.01894602232222559,
"acc_norm": 0.26605504587155965,
"acc_norm_stderr": 0.01894602232222559
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.44907407407407407,
"acc_stderr": 0.03392238405321617,
"acc_norm": 0.44907407407407407,
"acc_norm_stderr": 0.03392238405321617
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.24019607843137256,
"acc_stderr": 0.02998373305591361,
"acc_norm": 0.24019607843137256,
"acc_norm_stderr": 0.02998373305591361
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.26582278481012656,
"acc_stderr": 0.028756799629658342,
"acc_norm": 0.26582278481012656,
"acc_norm_stderr": 0.028756799629658342
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.19282511210762332,
"acc_stderr": 0.02647824096048936,
"acc_norm": 0.19282511210762332,
"acc_norm_stderr": 0.02647824096048936
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.25190839694656486,
"acc_stderr": 0.03807387116306085,
"acc_norm": 0.25190839694656486,
"acc_norm_stderr": 0.03807387116306085
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.3884297520661157,
"acc_stderr": 0.04449270350068382,
"acc_norm": 0.3884297520661157,
"acc_norm_stderr": 0.04449270350068382
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.21296296296296297,
"acc_stderr": 0.0395783547198098,
"acc_norm": 0.21296296296296297,
"acc_norm_stderr": 0.0395783547198098
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.26993865030674846,
"acc_stderr": 0.034878251684978906,
"acc_norm": 0.26993865030674846,
"acc_norm_stderr": 0.034878251684978906
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.22321428571428573,
"acc_stderr": 0.039523019677025116,
"acc_norm": 0.22321428571428573,
"acc_norm_stderr": 0.039523019677025116
},
"harness|hendrycksTest-management|5": {
"acc": 0.24271844660194175,
"acc_stderr": 0.042450224863844935,
"acc_norm": 0.24271844660194175,
"acc_norm_stderr": 0.042450224863844935
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.2094017094017094,
"acc_stderr": 0.026655699653922768,
"acc_norm": 0.2094017094017094,
"acc_norm_stderr": 0.026655699653922768
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.13,
"acc_stderr": 0.033799766898963086,
"acc_norm": 0.13,
"acc_norm_stderr": 0.033799766898963086
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.26947637292464877,
"acc_stderr": 0.015866243073215044,
"acc_norm": 0.26947637292464877,
"acc_norm_stderr": 0.015866243073215044
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.27167630057803466,
"acc_stderr": 0.02394851290546835,
"acc_norm": 0.27167630057803466,
"acc_norm_stderr": 0.02394851290546835
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.25251396648044694,
"acc_stderr": 0.014530330201468655,
"acc_norm": 0.25251396648044694,
"acc_norm_stderr": 0.014530330201468655
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.238562091503268,
"acc_stderr": 0.02440439492808787,
"acc_norm": 0.238562091503268,
"acc_norm_stderr": 0.02440439492808787
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.26366559485530544,
"acc_stderr": 0.02502553850053234,
"acc_norm": 0.26366559485530544,
"acc_norm_stderr": 0.02502553850053234
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.28703703703703703,
"acc_stderr": 0.025171041915309684,
"acc_norm": 0.28703703703703703,
"acc_norm_stderr": 0.025171041915309684
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.26595744680851063,
"acc_stderr": 0.02635806569888059,
"acc_norm": 0.26595744680851063,
"acc_norm_stderr": 0.02635806569888059
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.22359843546284225,
"acc_stderr": 0.010641589542841378,
"acc_norm": 0.22359843546284225,
"acc_norm_stderr": 0.010641589542841378
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.34558823529411764,
"acc_stderr": 0.028888193103988644,
"acc_norm": 0.34558823529411764,
"acc_norm_stderr": 0.028888193103988644
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.2549019607843137,
"acc_stderr": 0.01763082737514838,
"acc_norm": 0.2549019607843137,
"acc_norm_stderr": 0.01763082737514838
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.17272727272727273,
"acc_stderr": 0.03620691833929218,
"acc_norm": 0.17272727272727273,
"acc_norm_stderr": 0.03620691833929218
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.22448979591836735,
"acc_stderr": 0.026711430555538408,
"acc_norm": 0.22448979591836735,
"acc_norm_stderr": 0.026711430555538408
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.23383084577114427,
"acc_stderr": 0.029929415408348387,
"acc_norm": 0.23383084577114427,
"acc_norm_stderr": 0.029929415408348387
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.24,
"acc_stderr": 0.04292346959909284,
"acc_norm": 0.24,
"acc_norm_stderr": 0.04292346959909284
},
"harness|hendrycksTest-virology|5": {
"acc": 0.3253012048192771,
"acc_stderr": 0.03647168523683226,
"acc_norm": 0.3253012048192771,
"acc_norm_stderr": 0.03647168523683226
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.21637426900584794,
"acc_stderr": 0.03158149539338734,
"acc_norm": 0.21637426900584794,
"acc_norm_stderr": 0.03158149539338734
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2521419828641371,
"mc1_stderr": 0.015201522246299962,
"mc2": 0.4652388251878066,
"mc2_stderr": 0.015621160279545679
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
linhtran92/random | 2023-09-09T15:43:30.000Z | [
"region:us"
] | linhtran92 | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: id
dtype: string
- name: sentence
dtype: string
- name: intent
dtype: string
- name: sentence_annotation
dtype: string
- name: entities
list:
- name: type
dtype: string
- name: filler
dtype: string
- name: file
dtype: string
- name: audio
struct:
- name: array
sequence: float64
- name: path
dtype: string
- name: sampling_rate
dtype: int64
- name: origin_transcription
dtype: string
- name: sentence_norm
dtype: string
splits:
- name: train
num_bytes: 1085064441.2989166
num_examples: 2094
download_size: 260034262
dataset_size: 1085064441.2989166
---
# Dataset Card for "random"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
khaled123/test | 2023-09-09T15:44:30.000Z | [
"region:us"
] | khaled123 | null | null | null | 0 | 0 | Entry not found |
open-llm-leaderboard/details_PocketDoc__Dans-CreepingSenseOfDoom | 2023-09-09T15:55:20.000Z | [
"region:us"
] | open-llm-leaderboard | null | null | null | 0 | 0 | ---
pretty_name: Evaluation run of PocketDoc/Dans-CreepingSenseOfDoom
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [PocketDoc/Dans-CreepingSenseOfDoom](https://huggingface.co/PocketDoc/Dans-CreepingSenseOfDoom)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 61 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PocketDoc__Dans-CreepingSenseOfDoom\"\
,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\
\nThese are the [latest results from run 2023-09-09T15:53:59.451307](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-CreepingSenseOfDoom/blob/main/results_2023-09-09T15-53-59.451307.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.48295519085486904,\n\
\ \"acc_stderr\": 0.03528377850492319,\n \"acc_norm\": 0.4869816216630256,\n\
\ \"acc_norm_stderr\": 0.035269061698941014,\n \"mc1\": 0.2460220318237454,\n\
\ \"mc1_stderr\": 0.015077219200662587,\n \"mc2\": 0.37836667521939726,\n\
\ \"mc2_stderr\": 0.013889363996367721\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.4948805460750853,\n \"acc_stderr\": 0.014610624890309157,\n\
\ \"acc_norm\": 0.5332764505119454,\n \"acc_norm_stderr\": 0.014578995859605804\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5898227444732125,\n\
\ \"acc_stderr\": 0.0049086047320828115,\n \"acc_norm\": 0.7889862577175861,\n\
\ \"acc_norm_stderr\": 0.004071942209838278\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4148148148148148,\n\
\ \"acc_stderr\": 0.04256193767901407,\n \"acc_norm\": 0.4148148148148148,\n\
\ \"acc_norm_stderr\": 0.04256193767901407\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.48026315789473684,\n \"acc_stderr\": 0.040657710025626036,\n\
\ \"acc_norm\": 0.48026315789473684,\n \"acc_norm_stderr\": 0.040657710025626036\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.5433962264150943,\n \"acc_stderr\": 0.030656748696739435,\n\
\ \"acc_norm\": 0.5433962264150943,\n \"acc_norm_stderr\": 0.030656748696739435\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3888888888888889,\n\
\ \"acc_stderr\": 0.04076663253918567,\n \"acc_norm\": 0.3888888888888889,\n\
\ \"acc_norm_stderr\": 0.04076663253918567\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \
\ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n\
\ \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4797687861271676,\n\
\ \"acc_stderr\": 0.03809342081273957,\n \"acc_norm\": 0.4797687861271676,\n\
\ \"acc_norm_stderr\": 0.03809342081273957\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n\
\ \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n\
\ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.4127659574468085,\n \"acc_stderr\": 0.03218471141400352,\n\
\ \"acc_norm\": 0.4127659574468085,\n \"acc_norm_stderr\": 0.03218471141400352\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n\
\ \"acc_stderr\": 0.04266339443159393,\n \"acc_norm\": 0.2894736842105263,\n\
\ \"acc_norm_stderr\": 0.04266339443159393\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.4413793103448276,\n \"acc_stderr\": 0.04137931034482758,\n\
\ \"acc_norm\": 0.4413793103448276,\n \"acc_norm_stderr\": 0.04137931034482758\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3253968253968254,\n \"acc_stderr\": 0.02413015829976262,\n \"\
acc_norm\": 0.3253968253968254,\n \"acc_norm_stderr\": 0.02413015829976262\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2698412698412698,\n\
\ \"acc_stderr\": 0.03970158273235173,\n \"acc_norm\": 0.2698412698412698,\n\
\ \"acc_norm_stderr\": 0.03970158273235173\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6,\n\
\ \"acc_stderr\": 0.027869320571664632,\n \"acc_norm\": 0.6,\n \
\ \"acc_norm_stderr\": 0.027869320571664632\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.3645320197044335,\n \"acc_stderr\": 0.0338640574606209,\n\
\ \"acc_norm\": 0.3645320197044335,\n \"acc_norm_stderr\": 0.0338640574606209\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\
: 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.6121212121212121,\n \"acc_stderr\": 0.03804913653971013,\n\
\ \"acc_norm\": 0.6121212121212121,\n \"acc_norm_stderr\": 0.03804913653971013\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.6313131313131313,\n \"acc_stderr\": 0.03437305501980619,\n \"\
acc_norm\": 0.6313131313131313,\n \"acc_norm_stderr\": 0.03437305501980619\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.6580310880829016,\n \"acc_stderr\": 0.034234651001042844,\n\
\ \"acc_norm\": 0.6580310880829016,\n \"acc_norm_stderr\": 0.034234651001042844\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.5128205128205128,\n \"acc_stderr\": 0.02534267129380725,\n \
\ \"acc_norm\": 0.5128205128205128,\n \"acc_norm_stderr\": 0.02534267129380725\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.2518518518518518,\n \"acc_stderr\": 0.026466117538959916,\n \
\ \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.026466117538959916\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.4957983193277311,\n \"acc_stderr\": 0.03247734334448111,\n \
\ \"acc_norm\": 0.4957983193277311,\n \"acc_norm_stderr\": 0.03247734334448111\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"\
acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.6311926605504588,\n \"acc_stderr\": 0.020686227560729555,\n \"\
acc_norm\": 0.6311926605504588,\n \"acc_norm_stderr\": 0.020686227560729555\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\"\
: 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n\
\ \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5931372549019608,\n\
\ \"acc_stderr\": 0.03447891136353382,\n \"acc_norm\": 0.5931372549019608,\n\
\ \"acc_norm_stderr\": 0.03447891136353382\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\
: {\n \"acc\": 0.5907172995780591,\n \"acc_stderr\": 0.03200704183359592,\n\
\ \"acc_norm\": 0.5907172995780591,\n \"acc_norm_stderr\": 0.03200704183359592\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.515695067264574,\n\
\ \"acc_stderr\": 0.0335412657542081,\n \"acc_norm\": 0.515695067264574,\n\
\ \"acc_norm_stderr\": 0.0335412657542081\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.5648854961832062,\n \"acc_stderr\": 0.04348208051644858,\n\
\ \"acc_norm\": 0.5648854961832062,\n \"acc_norm_stderr\": 0.04348208051644858\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.6115702479338843,\n \"acc_stderr\": 0.04449270350068382,\n \"\
acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.04449270350068382\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6388888888888888,\n\
\ \"acc_stderr\": 0.04643454608906276,\n \"acc_norm\": 0.6388888888888888,\n\
\ \"acc_norm_stderr\": 0.04643454608906276\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.5337423312883436,\n \"acc_stderr\": 0.039194155450484096,\n\
\ \"acc_norm\": 0.5337423312883436,\n \"acc_norm_stderr\": 0.039194155450484096\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.21428571428571427,\n\
\ \"acc_stderr\": 0.038946411200447915,\n \"acc_norm\": 0.21428571428571427,\n\
\ \"acc_norm_stderr\": 0.038946411200447915\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.04541609446503949,\n\
\ \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.04541609446503949\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.6709401709401709,\n\
\ \"acc_stderr\": 0.030782321577688166,\n \"acc_norm\": 0.6709401709401709,\n\
\ \"acc_norm_stderr\": 0.030782321577688166\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6360153256704981,\n\
\ \"acc_stderr\": 0.017205684809032232,\n \"acc_norm\": 0.6360153256704981,\n\
\ \"acc_norm_stderr\": 0.017205684809032232\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.5578034682080925,\n \"acc_stderr\": 0.026738603643807403,\n\
\ \"acc_norm\": 0.5578034682080925,\n \"acc_norm_stderr\": 0.026738603643807403\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\
\ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n\
\ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.5392156862745098,\n \"acc_stderr\": 0.028541722692618874,\n\
\ \"acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.028541722692618874\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5755627009646302,\n\
\ \"acc_stderr\": 0.028071928247946205,\n \"acc_norm\": 0.5755627009646302,\n\
\ \"acc_norm_stderr\": 0.028071928247946205\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.4783950617283951,\n \"acc_stderr\": 0.02779476010500873,\n\
\ \"acc_norm\": 0.4783950617283951,\n \"acc_norm_stderr\": 0.02779476010500873\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.2978723404255319,\n \"acc_stderr\": 0.027281608344469417,\n \
\ \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.027281608344469417\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3963494132985658,\n\
\ \"acc_stderr\": 0.012492830452095219,\n \"acc_norm\": 0.3963494132985658,\n\
\ \"acc_norm_stderr\": 0.012492830452095219\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5220588235294118,\n \"acc_stderr\": 0.030343264224213528,\n\
\ \"acc_norm\": 0.5220588235294118,\n \"acc_norm_stderr\": 0.030343264224213528\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.4215686274509804,\n \"acc_stderr\": 0.01997742260022747,\n \
\ \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.01997742260022747\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4818181818181818,\n\
\ \"acc_stderr\": 0.04785964010794917,\n \"acc_norm\": 0.4818181818181818,\n\
\ \"acc_norm_stderr\": 0.04785964010794917\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.5591836734693878,\n \"acc_stderr\": 0.03178419114175363,\n\
\ \"acc_norm\": 0.5591836734693878,\n \"acc_norm_stderr\": 0.03178419114175363\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6616915422885572,\n\
\ \"acc_stderr\": 0.03345563070339192,\n \"acc_norm\": 0.6616915422885572,\n\
\ \"acc_norm_stderr\": 0.03345563070339192\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \
\ \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42168674698795183,\n\
\ \"acc_stderr\": 0.03844453181770917,\n \"acc_norm\": 0.42168674698795183,\n\
\ \"acc_norm_stderr\": 0.03844453181770917\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.6140350877192983,\n \"acc_stderr\": 0.03733756969066165,\n\
\ \"acc_norm\": 0.6140350877192983,\n \"acc_norm_stderr\": 0.03733756969066165\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2460220318237454,\n\
\ \"mc1_stderr\": 0.015077219200662587,\n \"mc2\": 0.37836667521939726,\n\
\ \"mc2_stderr\": 0.013889363996367721\n }\n}\n```"
repo_url: https://huggingface.co/PocketDoc/Dans-CreepingSenseOfDoom
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|arc:challenge|25_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hellaswag|10_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-53-59.451307.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-09-09T15-53-59.451307.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- '**/details_harness|truthfulqa:mc|0_2023-09-09T15-53-59.451307.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-09-09T15-53-59.451307.parquet'
- config_name: results
data_files:
- split: 2023_09_09T15_53_59.451307
path:
- results_2023-09-09T15-53-59.451307.parquet
- split: latest
path:
- results_2023-09-09T15-53-59.451307.parquet
---
# Dataset Card for Evaluation run of PocketDoc/Dans-CreepingSenseOfDoom
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/PocketDoc/Dans-CreepingSenseOfDoom
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [PocketDoc/Dans-CreepingSenseOfDoom](https://huggingface.co/PocketDoc/Dans-CreepingSenseOfDoom) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results.
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_PocketDoc__Dans-CreepingSenseOfDoom",
"harness_truthfulqa_mc_0",
split="train")
```
## Latest results
These are the [latest results from run 2023-09-09T15:53:59.451307](https://huggingface.co/datasets/open-llm-leaderboard/details_PocketDoc__Dans-CreepingSenseOfDoom/blob/main/results_2023-09-09T15-53-59.451307.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.48295519085486904,
"acc_stderr": 0.03528377850492319,
"acc_norm": 0.4869816216630256,
"acc_norm_stderr": 0.035269061698941014,
"mc1": 0.2460220318237454,
"mc1_stderr": 0.015077219200662587,
"mc2": 0.37836667521939726,
"mc2_stderr": 0.013889363996367721
},
"harness|arc:challenge|25": {
"acc": 0.4948805460750853,
"acc_stderr": 0.014610624890309157,
"acc_norm": 0.5332764505119454,
"acc_norm_stderr": 0.014578995859605804
},
"harness|hellaswag|10": {
"acc": 0.5898227444732125,
"acc_stderr": 0.0049086047320828115,
"acc_norm": 0.7889862577175861,
"acc_norm_stderr": 0.004071942209838278
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.27,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.4148148148148148,
"acc_stderr": 0.04256193767901407,
"acc_norm": 0.4148148148148148,
"acc_norm_stderr": 0.04256193767901407
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.48026315789473684,
"acc_stderr": 0.040657710025626036,
"acc_norm": 0.48026315789473684,
"acc_norm_stderr": 0.040657710025626036
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.5433962264150943,
"acc_stderr": 0.030656748696739435,
"acc_norm": 0.5433962264150943,
"acc_norm_stderr": 0.030656748696739435
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.3888888888888889,
"acc_stderr": 0.04076663253918567,
"acc_norm": 0.3888888888888889,
"acc_norm_stderr": 0.04076663253918567
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.37,
"acc_stderr": 0.04852365870939099,
"acc_norm": 0.37,
"acc_norm_stderr": 0.04852365870939099
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.4797687861271676,
"acc_stderr": 0.03809342081273957,
"acc_norm": 0.4797687861271676,
"acc_norm_stderr": 0.03809342081273957
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.28431372549019607,
"acc_stderr": 0.04488482852329017,
"acc_norm": 0.28431372549019607,
"acc_norm_stderr": 0.04488482852329017
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.56,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.56,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.4127659574468085,
"acc_stderr": 0.03218471141400352,
"acc_norm": 0.4127659574468085,
"acc_norm_stderr": 0.03218471141400352
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.2894736842105263,
"acc_stderr": 0.04266339443159393,
"acc_norm": 0.2894736842105263,
"acc_norm_stderr": 0.04266339443159393
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.4413793103448276,
"acc_stderr": 0.04137931034482758,
"acc_norm": 0.4413793103448276,
"acc_norm_stderr": 0.04137931034482758
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3253968253968254,
"acc_stderr": 0.02413015829976262,
"acc_norm": 0.3253968253968254,
"acc_norm_stderr": 0.02413015829976262
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.2698412698412698,
"acc_stderr": 0.03970158273235173,
"acc_norm": 0.2698412698412698,
"acc_norm_stderr": 0.03970158273235173
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6,
"acc_stderr": 0.027869320571664632,
"acc_norm": 0.6,
"acc_norm_stderr": 0.027869320571664632
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.3645320197044335,
"acc_stderr": 0.0338640574606209,
"acc_norm": 0.3645320197044335,
"acc_norm_stderr": 0.0338640574606209
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.05016135580465919,
"acc_norm": 0.47,
"acc_norm_stderr": 0.05016135580465919
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.6121212121212121,
"acc_stderr": 0.03804913653971013,
"acc_norm": 0.6121212121212121,
"acc_norm_stderr": 0.03804913653971013
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.6313131313131313,
"acc_stderr": 0.03437305501980619,
"acc_norm": 0.6313131313131313,
"acc_norm_stderr": 0.03437305501980619
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.6580310880829016,
"acc_stderr": 0.034234651001042844,
"acc_norm": 0.6580310880829016,
"acc_norm_stderr": 0.034234651001042844
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.5128205128205128,
"acc_stderr": 0.02534267129380725,
"acc_norm": 0.5128205128205128,
"acc_norm_stderr": 0.02534267129380725
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.2518518518518518,
"acc_stderr": 0.026466117538959916,
"acc_norm": 0.2518518518518518,
"acc_norm_stderr": 0.026466117538959916
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.4957983193277311,
"acc_stderr": 0.03247734334448111,
"acc_norm": 0.4957983193277311,
"acc_norm_stderr": 0.03247734334448111
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3708609271523179,
"acc_stderr": 0.03943966699183629,
"acc_norm": 0.3708609271523179,
"acc_norm_stderr": 0.03943966699183629
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.6311926605504588,
"acc_stderr": 0.020686227560729555,
"acc_norm": 0.6311926605504588,
"acc_norm_stderr": 0.020686227560729555
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4722222222222222,
"acc_stderr": 0.0340470532865388,
"acc_norm": 0.4722222222222222,
"acc_norm_stderr": 0.0340470532865388
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.5931372549019608,
"acc_stderr": 0.03447891136353382,
"acc_norm": 0.5931372549019608,
"acc_norm_stderr": 0.03447891136353382
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.5907172995780591,
"acc_stderr": 0.03200704183359592,
"acc_norm": 0.5907172995780591,
"acc_norm_stderr": 0.03200704183359592
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.515695067264574,
"acc_stderr": 0.0335412657542081,
"acc_norm": 0.515695067264574,
"acc_norm_stderr": 0.0335412657542081
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.5648854961832062,
"acc_stderr": 0.04348208051644858,
"acc_norm": 0.5648854961832062,
"acc_norm_stderr": 0.04348208051644858
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.6115702479338843,
"acc_stderr": 0.04449270350068382,
"acc_norm": 0.6115702479338843,
"acc_norm_stderr": 0.04449270350068382
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.6388888888888888,
"acc_stderr": 0.04643454608906276,
"acc_norm": 0.6388888888888888,
"acc_norm_stderr": 0.04643454608906276
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.5337423312883436,
"acc_stderr": 0.039194155450484096,
"acc_norm": 0.5337423312883436,
"acc_norm_stderr": 0.039194155450484096
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.21428571428571427,
"acc_stderr": 0.038946411200447915,
"acc_norm": 0.21428571428571427,
"acc_norm_stderr": 0.038946411200447915
},
"harness|hendrycksTest-management|5": {
"acc": 0.6990291262135923,
"acc_stderr": 0.04541609446503949,
"acc_norm": 0.6990291262135923,
"acc_norm_stderr": 0.04541609446503949
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.6709401709401709,
"acc_stderr": 0.030782321577688166,
"acc_norm": 0.6709401709401709,
"acc_norm_stderr": 0.030782321577688166
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.6360153256704981,
"acc_stderr": 0.017205684809032232,
"acc_norm": 0.6360153256704981,
"acc_norm_stderr": 0.017205684809032232
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5578034682080925,
"acc_stderr": 0.026738603643807403,
"acc_norm": 0.5578034682080925,
"acc_norm_stderr": 0.026738603643807403
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2424581005586592,
"acc_stderr": 0.014333522059217889,
"acc_norm": 0.2424581005586592,
"acc_norm_stderr": 0.014333522059217889
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.5392156862745098,
"acc_stderr": 0.028541722692618874,
"acc_norm": 0.5392156862745098,
"acc_norm_stderr": 0.028541722692618874
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.5755627009646302,
"acc_stderr": 0.028071928247946205,
"acc_norm": 0.5755627009646302,
"acc_norm_stderr": 0.028071928247946205
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.4783950617283951,
"acc_stderr": 0.02779476010500873,
"acc_norm": 0.4783950617283951,
"acc_norm_stderr": 0.02779476010500873
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.2978723404255319,
"acc_stderr": 0.027281608344469417,
"acc_norm": 0.2978723404255319,
"acc_norm_stderr": 0.027281608344469417
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.3963494132985658,
"acc_stderr": 0.012492830452095219,
"acc_norm": 0.3963494132985658,
"acc_norm_stderr": 0.012492830452095219
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5220588235294118,
"acc_stderr": 0.030343264224213528,
"acc_norm": 0.5220588235294118,
"acc_norm_stderr": 0.030343264224213528
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.4215686274509804,
"acc_stderr": 0.01997742260022747,
"acc_norm": 0.4215686274509804,
"acc_norm_stderr": 0.01997742260022747
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.4818181818181818,
"acc_stderr": 0.04785964010794917,
"acc_norm": 0.4818181818181818,
"acc_norm_stderr": 0.04785964010794917
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.5591836734693878,
"acc_stderr": 0.03178419114175363,
"acc_norm": 0.5591836734693878,
"acc_norm_stderr": 0.03178419114175363
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.6616915422885572,
"acc_stderr": 0.03345563070339192,
"acc_norm": 0.6616915422885572,
"acc_norm_stderr": 0.03345563070339192
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-virology|5": {
"acc": 0.42168674698795183,
"acc_stderr": 0.03844453181770917,
"acc_norm": 0.42168674698795183,
"acc_norm_stderr": 0.03844453181770917
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.6140350877192983,
"acc_stderr": 0.03733756969066165,
"acc_norm": 0.6140350877192983,
"acc_norm_stderr": 0.03733756969066165
},
"harness|truthfulqa:mc|0": {
"mc1": 0.2460220318237454,
"mc1_stderr": 0.015077219200662587,
"mc2": 0.37836667521939726,
"mc2_stderr": 0.013889363996367721
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
fahmiaziz/dataset-donut-v1-receipt-200-img | 2023-09-09T16:06:53.000Z | [
"task_categories:feature-extraction",
"task_categories:token-classification",
"size_categories:n<1K",
"language:en",
"license:openrail",
"finance",
"region:us"
] | fahmiaziz | null | null | null | 0 | 0 | ---
language:
- en
license: openrail
size_categories:
- n<1K
task_categories:
- feature-extraction
- token-classification
tags:
- finance
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: image
dtype: image
- name: ground_truth
dtype: string
splits:
- name: train
num_bytes: 69756517.0
num_examples: 176
- name: validation
num_bytes: 8294088.0
num_examples: 21
- name: test
num_bytes: 4247549.0
num_examples: 11
download_size: 78567537
dataset_size: 82298154.0
---
|
linhtran92/infer_55epoch_onRandom | 2023-09-09T15:55:11.000Z | [
"region:us"
] | linhtran92 | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: sentence
dtype: string
- name: w2v2_baseline_transcription
dtype: string
- name: w2v2_baseline_norm
dtype: string
splits:
- name: train
num_bytes: 412332
num_examples: 2094
download_size: 180633
dataset_size: 412332
---
# Dataset Card for "infer_55epoch_onRandom"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
dongyoung4091/shp_with_features_20k_flan_t5_large_external_rm1_large | 2023-09-09T15:58:32.000Z | [
"region:us"
] | dongyoung4091 | null | null | null | 0 | 0 | Entry not found |
samsonmax/SSDdata | 2023-09-09T17:46:35.000Z | [
"region:us"
] | samsonmax | null | null | null | 0 | 0 | Entry not found |
zongxiao/github-issues-colab | 2023-09-09T16:03:34.000Z | [
"region:us"
] | zongxiao | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: url
dtype: string
- name: repository_url
dtype: string
- name: labels_url
dtype: string
- name: comments_url
dtype: string
- name: events_url
dtype: string
- name: html_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: user
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: labels
list:
- name: id
dtype: int64
- name: node_id
dtype: string
- name: url
dtype: string
- name: name
dtype: string
- name: color
dtype: string
- name: default
dtype: bool
- name: description
dtype: string
- name: state
dtype: string
- name: locked
dtype: bool
- name: assignee
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: assignees
list:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: milestone
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: labels_url
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: number
dtype: int64
- name: title
dtype: string
- name: description
dtype: string
- name: creator
struct:
- name: login
dtype: string
- name: id
dtype: int64
- name: node_id
dtype: string
- name: avatar_url
dtype: string
- name: gravatar_id
dtype: string
- name: url
dtype: string
- name: html_url
dtype: string
- name: followers_url
dtype: string
- name: following_url
dtype: string
- name: gists_url
dtype: string
- name: starred_url
dtype: string
- name: subscriptions_url
dtype: string
- name: organizations_url
dtype: string
- name: repos_url
dtype: string
- name: events_url
dtype: string
- name: received_events_url
dtype: string
- name: type
dtype: string
- name: site_admin
dtype: bool
- name: open_issues
dtype: int64
- name: closed_issues
dtype: int64
- name: state
dtype: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: due_on
dtype: timestamp[s]
- name: closed_at
dtype: timestamp[s]
- name: comments
sequence: string
- name: created_at
dtype: timestamp[s]
- name: updated_at
dtype: timestamp[s]
- name: closed_at
dtype: timestamp[s]
- name: author_association
dtype: string
- name: active_lock_reason
dtype: 'null'
- name: draft
dtype: bool
- name: pull_request
struct:
- name: url
dtype: string
- name: html_url
dtype: string
- name: diff_url
dtype: string
- name: patch_url
dtype: string
- name: merged_at
dtype: timestamp[s]
- name: body
dtype: string
- name: reactions
struct:
- name: url
dtype: string
- name: total_count
dtype: int64
- name: '+1'
dtype: int64
- name: '-1'
dtype: int64
- name: laugh
dtype: int64
- name: hooray
dtype: int64
- name: confused
dtype: int64
- name: heart
dtype: int64
- name: rocket
dtype: int64
- name: eyes
dtype: int64
- name: timeline_url
dtype: string
- name: performed_via_github_app
dtype: 'null'
- name: state_reason
dtype: string
- name: is_pull_request
dtype: bool
splits:
- name: train
num_bytes: 13478052
num_examples: 3624
download_size: 3952655
dataset_size: 13478052
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "github-issues-colab"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
khaled123/test2 | 2023-09-09T16:08:41.000Z | [
"region:us"
] | khaled123 | null | null | null | 0 | 0 | Entry not found |
Junr-syl/Movie_review_instruction_tuned | 2023-09-09T16:39:42.000Z | [
"region:us"
] | Junr-syl | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 30157866
num_examples: 20000
download_size: 17856145
dataset_size: 30157866
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "Movie_review_instruction_tuned"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
zzd0225/CDSet | 2023-09-09T16:24:26.000Z | [
"license:apache-2.0",
"region:us"
] | zzd0225 | null | null | null | 1 | 0 | ---
license: apache-2.0
---
|
parseny/DCS_dataset | 2023-09-09T16:36:43.000Z | [
"region:us"
] | parseny | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: text
dtype: string
- name: labels
dtype: string
splits:
- name: train
num_bytes: 163160222
num_examples: 1145003
download_size: 30946202
dataset_size: 163160222
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "DCS_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Junr-syl/Movie_review_instruction_tuned_test | 2023-09-09T16:42:37.000Z | [
"region:us"
] | Junr-syl | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 7449957
num_examples: 5000
download_size: 4410730
dataset_size: 7449957
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "Movie_review_instruction_tuned_test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
deadbits/vigil-instruction-bypass-ada-002 | 2023-09-09T18:29:12.000Z | [
"embeddings",
"text",
"security",
"region:us"
] | deadbits | null | null | null | 0 | 0 | ---
tags:
- embeddings
- text
- security
pretty_name: 'Vigil: LLM Instruction Bypass text-embedding-ada-002 '
---
# Vigil: LLM Instruction Bypass all-MiniLM-L6-v2
- **Repo:** [github.com/deadbits/vigil-llm](https://github.com/deadbits/vigil-llm)
`Vigil` is a Python framework and REST API for assessing Large Language Model (LLM) prompts against a set of scanners to detect prompt injections, jailbreaks, and other potentially risky inputs.
This repository contains `text-embedding-ada-002` embeddings for all Instruction Bypass style prompts ("Ignore instructions ...") used by [Vigil](https://github.com/deadbits/prompt-injection-defense).
You can use the [parquet2vdb.py](https://github.com/deadbits/prompt-injection-defense/blob/main/vigil/utils/parquet2vdb.py) utility to load the embeddings in the Vigil chromadb instance, or use them in your own application.
## Format
```json
[
{
"text": str,
"embedding": [],
"model": "text-embedding-ada-002"
}
]
```
Instruction bypass prompts generated with: https://gist.github.com/deadbits/e93a90aa36c9aa7b5ce1179597a6fe3d#file-generate-phrases-py |
konona5/l4m_datasets | 2023-09-09T17:12:37.000Z | [
"license:creativeml-openrail-m",
"region:us"
] | konona5 | null | null | null | 0 | 0 | ---
license: creativeml-openrail-m
---
|
nodlnodl/kb | 2023-09-09T17:14:11.000Z | [
"region:us"
] | nodlnodl | null | null | null | 0 | 0 | Entry not found |
tayamaken/Monroe | 2023-09-09T17:33:00.000Z | [
"region:us"
] | tayamaken | null | null | null | 0 | 0 | Entry not found |
a8cheng/dataset_smal | 2023-09-23T10:29:03.000Z | [
"region:us"
] | a8cheng | null | null | null | 0 | 0 | Entry not found |
internetsos/drive | 2023-10-08T22:24:09.000Z | [
"region:us"
] | internetsos | null | null | null | 0 | 0 | Entry not found |
IsaacJu666/simple_datasets | 2023-09-09T19:17:03.000Z | [
"license:openrail",
"region:us"
] | IsaacJu666 | null | null | null | 0 | 0 | ---
license: openrail
dataset_info:
features:
- name: id
dtype: string
- name: url
dtype: string
- name: title
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 235060541
num_examples: 205328
download_size: 133886409
dataset_size: 235060541
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
bohdan1/knowd | 2023-09-09T19:36:35.000Z | [
"license:mit",
"region:us"
] | bohdan1 | null | null | null | 0 | 0 | ---
license: mit
---
|
cellos/test1 | 2023-09-09T19:35:34.000Z | [
"license:mit",
"region:us"
] | cellos | null | null | null | 0 | 0 | ---
license: mit
---
|
yzhuang/autotree_pmlb_100000_ring_sgosdt_l256_dim10_d3_sd0 | 2023-09-09T20:16:49.000Z | [
"region:us"
] | yzhuang | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: id
dtype: int64
- name: input_x
sequence:
sequence: float32
- name: input_y
sequence:
sequence: float32
- name: input_y_clean
sequence:
sequence: float32
- name: rtg
sequence: float64
- name: status
sequence:
sequence: float32
- name: split_threshold
sequence:
sequence: float32
- name: split_dimension
sequence: int64
splits:
- name: train
num_bytes: 2364400000
num_examples: 100000
- name: validation
num_bytes: 236440000
num_examples: 10000
download_size: 676989170
dataset_size: 2600840000
---
# Dataset Card for "autotree_pmlb_100000_ring_sgosdt_l256_dim10_d3_sd0"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Hrukanina/ultra-small | 2023-09-09T20:35:06.000Z | [
"region:us"
] | Hrukanina | null | null | null | 0 | 0 | Entry not found |
vikp/hydra_learning_labeled | 2023-09-09T22:21:19.000Z | [
"region:us"
] | vikp | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: unique_conversation_id
dtype: string
- name: rendered
dtype: string
- name: dataset_id
dtype: string
- name: learning_prob
dtype: float64
splits:
- name: train
num_bytes: 4796996141
num_examples: 2527636
download_size: 2492767126
dataset_size: 4796996141
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "hydra_learning_labeled"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Leukoplast/F | 2023-09-09T22:18:44.000Z | [
"region:us"
] | Leukoplast | null | null | null | 0 | 0 | Entry not found |
andersonbcdefg/c4-1000 | 2023-09-09T22:23:08.000Z | [
"region:us"
] | andersonbcdefg | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: text
dtype: string
- name: timestamp
dtype: string
- name: url
dtype: string
splits:
- name: train
num_bytes: 2303428
num_examples: 1000
download_size: 1435214
dataset_size: 2303428
---
# Dataset Card for "c4-1000"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
HydraLM/code_instructions_standardized | 2023-09-09T23:28:40.000Z | [
"region:us"
] | HydraLM | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: message
dtype: string
- name: message_type
dtype: string
- name: message_id
dtype: int64
- name: conversation_id
dtype: int64
splits:
- name: train
num_bytes: 284806703
num_examples: 272294
download_size: 140857707
dataset_size: 284806703
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "code_instructions_standardized"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Mxa/DeepFake_Binary_Classifier | 2023-09-10T00:22:37.000Z | [
"region:us"
] | Mxa | null | null | null | 0 | 0 | Entry not found |
tiendung/vi_starcoder_raw | 2023-09-10T03:40:30.000Z | [
"region:us"
] | tiendung | null | null | null | 0 | 0 | Entry not found |
ucalyptus/car_embeddings | 2023-09-10T03:22:54.000Z | [
"region:us"
] | ucalyptus | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: '0'
dtype: float32
- name: '1'
dtype: float32
- name: '2'
dtype: float32
- name: '3'
dtype: float32
- name: '4'
dtype: float32
- name: '5'
dtype: float32
- name: '6'
dtype: float32
- name: '7'
dtype: float32
- name: '8'
dtype: float32
- name: '9'
dtype: float32
- name: '10'
dtype: float32
- name: '11'
dtype: float32
- name: '12'
dtype: float32
- name: '13'
dtype: float32
- name: '14'
dtype: float32
- name: '15'
dtype: float32
- name: '16'
dtype: float32
- name: '17'
dtype: float32
- name: '18'
dtype: float32
- name: '19'
dtype: float32
- name: '20'
dtype: float32
- name: '21'
dtype: float32
- name: '22'
dtype: float32
- name: '23'
dtype: float32
- name: '24'
dtype: float32
- name: '25'
dtype: float32
- name: '26'
dtype: float32
- name: '27'
dtype: float32
- name: '28'
dtype: float32
- name: '29'
dtype: float32
- name: '30'
dtype: float32
- name: '31'
dtype: float32
- name: '32'
dtype: float32
- name: '33'
dtype: float32
- name: '34'
dtype: float32
- name: '35'
dtype: float32
- name: '36'
dtype: float32
- name: '37'
dtype: float32
- name: '38'
dtype: float32
- name: '39'
dtype: float32
- name: '40'
dtype: float32
- name: '41'
dtype: float32
- name: '42'
dtype: float32
- name: '43'
dtype: float32
- name: '44'
dtype: float32
- name: '45'
dtype: float32
- name: '46'
dtype: float32
- name: '47'
dtype: float32
- name: '48'
dtype: float32
- name: '49'
dtype: float32
- name: '50'
dtype: float32
- name: '51'
dtype: float32
- name: '52'
dtype: float32
- name: '53'
dtype: float32
- name: '54'
dtype: float32
- name: '55'
dtype: float32
- name: '56'
dtype: float32
- name: '57'
dtype: float32
- name: '58'
dtype: float32
- name: '59'
dtype: float32
- name: '60'
dtype: float32
- name: '61'
dtype: float32
- name: '62'
dtype: float32
- name: '63'
dtype: float32
- name: '64'
dtype: float32
- name: '65'
dtype: float32
- name: '66'
dtype: float32
- name: '67'
dtype: float32
- name: '68'
dtype: float32
- name: '69'
dtype: float32
- name: '70'
dtype: float32
- name: '71'
dtype: float32
- name: '72'
dtype: float32
- name: '73'
dtype: float32
- name: '74'
dtype: float32
- name: '75'
dtype: float32
- name: '76'
dtype: float32
- name: '77'
dtype: float32
- name: '78'
dtype: float32
- name: '79'
dtype: float32
- name: '80'
dtype: float32
- name: '81'
dtype: float32
- name: '82'
dtype: float32
- name: '83'
dtype: float32
- name: '84'
dtype: float32
- name: '85'
dtype: float32
- name: '86'
dtype: float32
- name: '87'
dtype: float32
- name: '88'
dtype: float32
- name: '89'
dtype: float32
- name: '90'
dtype: float32
- name: '91'
dtype: float32
- name: '92'
dtype: float32
- name: '93'
dtype: float32
- name: '94'
dtype: float32
- name: '95'
dtype: float32
- name: '96'
dtype: float32
- name: '97'
dtype: float32
- name: '98'
dtype: float32
- name: '99'
dtype: float32
- name: '100'
dtype: float32
- name: '101'
dtype: float32
- name: '102'
dtype: float32
- name: '103'
dtype: float32
- name: '104'
dtype: float32
- name: '105'
dtype: float32
- name: '106'
dtype: float32
- name: '107'
dtype: float32
- name: '108'
dtype: float32
- name: '109'
dtype: float32
- name: '110'
dtype: float32
- name: '111'
dtype: float32
- name: '112'
dtype: float32
- name: '113'
dtype: float32
- name: '114'
dtype: float32
- name: '115'
dtype: float32
- name: '116'
dtype: float32
- name: '117'
dtype: float32
- name: '118'
dtype: float32
- name: '119'
dtype: float32
- name: '120'
dtype: float32
- name: '121'
dtype: float32
- name: '122'
dtype: float32
- name: '123'
dtype: float32
- name: '124'
dtype: float32
- name: '125'
dtype: float32
- name: '126'
dtype: float32
- name: '127'
dtype: float32
splits:
- name: train
num_bytes: 4169728
num_examples: 8144
download_size: 303332
dataset_size: 4169728
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "car_embeddings"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
samsonmax/halloworld | 2023-09-10T13:53:52.000Z | [
"region:us"
] | samsonmax | null | null | null | 0 | 0 | Entry not found |
Ujito/Mukozo | 2023-09-10T04:03:26.000Z | [
"license:bigscience-openrail-m",
"region:us"
] | Ujito | null | null | null | 0 | 0 | ---
license: bigscience-openrail-m
---
|
heiansstm/13B | 2023-09-10T04:03:48.000Z | [
"license:unknown",
"region:us"
] | heiansstm | null | null | null | 0 | 0 | ---
license: unknown
---
|
P1ayer-1/isbndb-annas-duplicates | 2023-09-10T04:12:44.000Z | [
"region:us"
] | P1ayer-1 | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: authors
dtype: string
- name: color
sequence: float64
- name: depth
dtype: int64
- name: field
dtype: string
- name: id
dtype: int64
- name: match_count
dtype: int64
- name: position
sequence: float64
- name: title
dtype: string
- name: author_hashes
dtype: string
- name: title_hashes
dtype: string
- name: isbn
sequence: string
- name: isbn13
sequence: string
splits:
- name: train
num_bytes: 15013450
num_examples: 61419
download_size: 9443920
dataset_size: 15013450
---
# Dataset Card for "isbndb-annas-duplicates"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
SkunkworksAI-shared/failed | 2023-09-10T05:03:05.000Z | [
"region:us"
] | SkunkworksAI-shared | null | null | null | 0 | 0 | Entry not found |
YxBxRyXJx/cat_train | 2023-09-25T02:03:35.000Z | [
"license:apache-2.0",
"region:us"
] | YxBxRyXJx | null | null | null | 0 | 0 | ---
license: apache-2.0
---
## このデータベースは猫の飼い方に関するQAをまとめたものです。
インターネット上の英語、日本語の情報をもとに、情報を再編成してつくったものです。
LLMのファインチューニング用に使ってみてください。
コンテキストは英語です。
参考となるブログは[こちら](https://jpnqeur23lmqsw.blogspot.com/2023/09/qeur23llmdss9llm.html)
|
tiri1231/jamisyou-Zunko | 2023-09-10T06:08:33.000Z | [
"region:us"
] | tiri1231 | null | null | null | 0 | 0 | Entry not found |
TeraTTS/raw_nkrja | 2023-09-10T06:21:48.000Z | [
"task_categories:token-classification",
"task_categories:text-classification",
"task_categories:text2text-generation",
"size_categories:10M<n<100M",
"language:ru",
"license:mit",
"region:us"
] | TeraTTS | null | null | null | 0 | 0 | ---
license: mit
task_categories:
- token-classification
- text-classification
- text2text-generation
language:
- ru
size_categories:
- 10M<n<100M
---
Акцентологический подкорпус [Национального Корпуса Русского Языка](https://ruscorpora.ru/) |
galax21/uxen | 2023-09-10T07:00:05.000Z | [
"region:us"
] | galax21 | null | null | null | 0 | 0 | Entry not found |
Introvert696/klavakoka | 2023-09-10T06:58:25.000Z | [
"license:openrail",
"region:us"
] | Introvert696 | null | null | null | 0 | 0 | ---
license: openrail
---
|
CyberHarem/saitou_miyako_oshinoko | 2023-09-17T17:30:21.000Z | [
"task_categories:text-to-image",
"size_categories:n<1K",
"license:mit",
"art",
"not-for-all-audiences",
"region:us"
] | CyberHarem | null | null | null | 0 | 0 | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of Saitou Miyako
This is the dataset of Saitou Miyako, containing 102 images and their tags.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
| Name | Images | Download | Description |
|:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------|
| raw | 102 | [Download](dataset-raw.zip) | Raw data with meta information. |
| raw-stage3 | 220 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. |
| 384x512 | 102 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. |
| 512x512 | 102 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. |
| 512x704 | 102 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. |
| 640x640 | 102 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. |
| 640x880 | 102 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. |
| stage3-640 | 220 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. |
| stage3-800 | 220 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. |
| stage3-1200 | 220 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
|
ZeroCool94/INE-Dataset | 2023-09-10T08:09:37.000Z | [
"region:us"
] | ZeroCool94 | null | null | null | 0 | 0 | Entry not found |
SaniyatMushrat/SusathoAI | 2023-09-10T08:25:24.000Z | [
"region:us"
] | SaniyatMushrat | null | null | null | 0 | 0 | Entry not found |
liyuze/race | 2023-09-10T08:43:54.000Z | [
"license:other",
"region:us"
] | liyuze | null | null | null | 0 | 0 | ---
license: other
---
|
CyberHarem/stheno_fgo | 2023-09-17T17:30:27.000Z | [
"task_categories:text-to-image",
"size_categories:n<1K",
"license:mit",
"art",
"not-for-all-audiences",
"region:us"
] | CyberHarem | null | null | null | 0 | 0 | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of stheno (Fate/Grand Order)
This is the dataset of stheno (Fate/Grand Order), containing 200 images and their tags.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan ...), the auto-crawling system is powered by [DeepGHS Team](https://github.com/deepghs)([huggingface organization](https://huggingface.co/deepghs)).
| Name | Images | Download | Description |
|:------------|---------:|:------------------------------------|:-------------------------------------------------------------------------|
| raw | 200 | [Download](dataset-raw.zip) | Raw data with meta information. |
| raw-stage3 | 485 | [Download](dataset-raw-stage3.zip) | 3-stage cropped raw data with meta information. |
| 384x512 | 200 | [Download](dataset-384x512.zip) | 384x512 aligned dataset. |
| 512x512 | 200 | [Download](dataset-512x512.zip) | 512x512 aligned dataset. |
| 512x704 | 200 | [Download](dataset-512x704.zip) | 512x704 aligned dataset. |
| 640x640 | 200 | [Download](dataset-640x640.zip) | 640x640 aligned dataset. |
| 640x880 | 200 | [Download](dataset-640x880.zip) | 640x880 aligned dataset. |
| stage3-640 | 485 | [Download](dataset-stage3-640.zip) | 3-stage cropped dataset with the shorter side not exceeding 640 pixels. |
| stage3-800 | 485 | [Download](dataset-stage3-800.zip) | 3-stage cropped dataset with the shorter side not exceeding 800 pixels. |
| stage3-1200 | 485 | [Download](dataset-stage3-1200.zip) | 3-stage cropped dataset with the shorter side not exceeding 1200 pixels. |
|
OneFly7/llama2-politosphere-fine-tuning-system-prompt_with_definition | 2023-09-10T09:06:02.000Z | [
"region:us"
] | OneFly7 | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: text
dtype: string
- name: label_text
dtype: string
splits:
- name: train
num_bytes: 184692
num_examples: 113
- name: validation
num_bytes: 182440
num_examples: 113
download_size: 66387
dataset_size: 367132
---
# Dataset Card for "llama2-politosphere-fine-tuning-system-prompt_with_definition"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
idolt/SDVNColab | 2023-09-10T11:32:35.000Z | [
"region:us"
] | idolt | null | null | null | 0 | 0 | Entry not found |
Ayaz12/smokesscreen | 2023-09-10T11:54:36.000Z | [
"region:us"
] | Ayaz12 | null | null | null | 0 | 0 | Entry not found |
edbeeching/gia-dataset-tokenized-debug | 2023-09-10T19:39:24.000Z | [
"region:us"
] | edbeeching | null | null | null | 0 | 0 | ---
dataset_info:
config_name: atari-alien
features:
- name: input_types
sequence: int64
- name: input_ids
sequence: int32
- name: patches
sequence:
sequence:
sequence:
sequence: uint8
- name: loss_mask
sequence: bool
- name: patch_positions
sequence:
sequence:
sequence: float64
- name: local_positions
sequence: int64
- name: attention_mask
sequence: bool
splits:
- name: test
num_bytes: 442153668
num_examples: 335
- name: train
num_bytes: 381345596
num_examples: 289
download_size: 66896005
dataset_size: 823499264
configs:
- config_name: atari-alien
data_files:
- split: test
path: atari-alien/test-*
- split: train
path: atari-alien/train-*
---
# Dataset Card for "gia-dataset-tokenized-debug"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
cjvt/parlaMintSI | 2023-10-04T17:21:49.000Z | [
"task_categories:other",
"multilinguality:monolingual",
"size_categories:100K<n<1M",
"language:sl",
"license:cc-by-4.0",
"region:us"
] | cjvt | ParlaMint 3.0 is a multilingual set of 26 comparable corpora containing parliamentary debates mostly starting in 2015 and extending to mid-2022.
The corpora have extensive metadata, including aspects of the parliament; the speakers (name, gender, MP status, party affiliation, party coalition/opposition);
are structured into time-stamped terms, sessions and meetings; and with speeches being marked by the speaker and their role (e.g. chair, regular speaker).
The speeches also contain marked-up transcriber comments, such as gaps in the transcription, interruptions, applause, etc.
Note that some corpora have further information, e.g. the year of birth of the speakers, links to their Wikipedia articles, their membership in various committees, etc.
The corpora are also marked to the subcorpus they belong to ("reference", until 2020-01-30, "covid", from 2020-01-31, and "war", from 2022-02-24).
The corpora are encoded according to the Parla-CLARIN TEI recommendation (https://clarin-eric.github.io/parla-clarin/), but have been encoded against the compatible,
but much stricter ParlaMint encoding guidelines (https://clarin-eric.github.io/ParlaMint/) and schemas (included in this distribution).
This entry contains the ParlaMint TEI-encoded corpora with the derived plain text versions of the corpora along with TSV metadata of the speeches.
Also included is the 3.0 release of the data and scripts available at the GitHub repository of the ParlaMint project.
This dataset contains only Slovenian parliamentary debates. | null | null | 0 | 0 | ---
dataset_info:
features:
- name: ID
dtype: string
- name: Title
dtype: string
- name: Date
dtype: string
- name: Body
dtype: string
- name: Term
dtype: string
- name: Session
dtype: string
- name: Meeting
dtype: int32
- name: Sitting
dtype: string
- name: Agenda
dtype: string
- name: Subcorpus
dtype: string
- name: Speaker_role
dtype: string
- name: Speaker_MP
dtype: string
- name: Speaker_Minister
dtype: string
- name: Speaker_party
dtype: string
- name: Speaker_party_name
dtype: string
- name: Party_status
dtype: string
- name: Speaker_name
dtype: string
- name: Speaker_gender
dtype: string
- name: Speaker_birth
dtype: string
- name: text
dtype: string
splits:
- name: train
num_bytes: 555501497
num_examples: 311354
download_size: 327446923
dataset_size: 555501497
license:
- cc-by-4.0
language:
- sl
multilinguality:
- monolingual
task_categories:
- other
size_categories:
- 100K<n<1M
---
# Dataset Card for ParlaMint 3.0
### Dataset Summary
ParlaMint 3.0 is a multilingual set of 26 comparable corpora containing parliamentary debates mostly starting in 2015 and extending to mid-2022, with the individual corpora being between 9 and 125 million words in size.
This dataset contains only Slovenian parliamentary debates.
### Languages
Slovenian.
## Dataset Structure
### Data Instances
A sample instance from the dataset:
```
{
'ID': 'ParlaMint-SI_2022-04-06-SDZ8-Izredna-99.u227',
'Title': 'Minutes of the National Assembly of the Republic of Slovenia, Term 8, Extraordinary Session 99, (06. 04. 2022)',
'Date': '2022-04-06',
'Body': 'Lower house',
'Term': '8',
'Session': '',
'Meeting': 99,
'Sitting': '',
'Agenda': '',
'Subcorpus': 'War',
'Speaker_role': 'Regular',
'Speaker_MP': 'MP',
'Speaker_Minister': '-',
'Speaker_party': 'Levica',
'Speaker_party_name': 'Levica',
'Party_status': 'Opposition',
'Speaker_name': 'Koražija, Boštjan',
'Speaker_gender': 'M',
'Speaker_birth': '1974',
'text': '[[…]]Pa celo poslanec z Prekmurja, no, kaj sem rekel [[…]] [[nemir v dvorani]] Zdaj bodite pa tiho, v redu, okej. No, kot rečeno, gre se za to, da se zaščiti tudi kot Prekmurje samo in tudi takrat se je, ne vemo, kdo in zakaj je širil neke, bom rekel, nebuloze oziroma tudi »fake news« po Prekmurju, v smislu, čez, da Levica želi prepovedati geotermalno energijo oziroma pač samo uporabo, kar ne drži. V Levici smo za geotermalno energijo, smo pa seveda proti [[znak za konec razprave]] in strogo proti frekingu, to kar ste želeli vi doseči prej, ampak ste potem videli, da zaradi glasovanja, ki se je že zgodilo na prejšnji seji, da tega ne boste dosegli in ste tudi morali popustit. In srečen sem za Prekmurje in srečen sem za vzhodno Slovenijo, da smo končno nekaj pametnega naredili. Hvala.\n'
}
```
### Data Fields
- 'ID': Unique identifier for each example;
- 'Title': Title or heading of the parliamentary debate;
- 'Date': The date when the parliamentary debate took place;
- 'Body': The primary chamber or house of the parliamentary assembly in which the debate occurred;
- 'Term': The legislative term or session number during which the debate was conducted;
- 'Session': Specific session or part of the term when the debate was held;
- 'Meeting': Numeric identifier or count of the meeting within a session or term;
- 'Sitting': Particular segment or part of a larger meeting or session;
- 'Agenda': Subset or category of the main corpus to which the record belongs;
- 'Subcorpus': Subset or category of the main corpus to which the record belongs;
- 'Speaker_role': Role or position of the speaker during the debate, e.g., chairperson, main speaker, etc;
- 'Speaker_MP': Indicator if the speaker is a Member of Parliament or not;
- 'Speaker_Minister': Indicator if the speaker is a Minister or holds an executive office;
- 'Speaker_party': Abbreviated code or identifier for the political party of the speaker;
- 'Speaker_party_name': Full name of the political party to which the speaker belongs;
- 'Party_status': The status or standing of the party in the parliamentary assembly, e.g., ruling, opposition, etc;
- 'Speaker_name': Full name of the individual speaking during the debate;
- 'Speaker_gender': Gender of the speaker;
- 'Speaker_birth': Year of birth of the speaker;
- 'text': Transcription of the spoken content during the debate.
## Additional Information
### Dataset Curators
Erjavec, Tomaž ; et al.
### Licensing Information
CC BY 4.0
### Citation Information
```
@misc{11356/1486,
title = {Multilingual comparable corpora of parliamentary debates {ParlaMint} 3.0},
author = {Erjavec, Toma{\v z} and Kopp, Maty{\'a}{\v s} and Ogrodniczuk, Maciej and Osenova, Petya and Fi{\v s}er, Darja and Pirker, Hannes and Wissik, Tanja and Schopper, Daniel and Kirnbauer, Martin and Ljube{\v s}i{\'c}, Nikola and Rupnik, Peter and Mochtak, Michal and Pol, Henk van der and Depoorter, Griet and Simov, Kiril and Grigorova, Vladislava and Grigorov, Ilko and Jongejan, Bart and Haltrup Hansen, Dorte and Navarretta, Costanza and M{\"o}lder, Martin and Kahusk, Neeme and Vider, Kadri and Bel, Nuria and Antiba-Cartazo, Iv{\'a}n and Pisani, Marilina and Zevallos, Rodolfo and Vladu, Adina Ioana and Magari{\~n}os, Carmen and Bardanca, Daniel and Barcala, Mario and Garcia, Marcos and P{\'e}rez Lago, Mar{\'{\i}}a and Garc{\'{\i}}a Louzao, Pedro and Vivel Couso, Ainhoa and V{\'a}zquez Abu{\'{\i}}n, Marta and Garc{\'{\i}}a D{\'{\i}}az, Noelia and Vidal Migu{\'e}ns, Adri{\'a}n and Fern{\'a}ndez Rei, Elisa and Regueira, Xos{\'e} Lu{\'{\i}}s and Diwersy, Sascha and Luxardo, Giancarlo and Coole, Matthew and Rayson, Paul and Nwadukwe, Amanda and Gkoumas, Dimitris and Papavassiliou, Vassilis and Prokopidis, Prokopis and Gavriilidou, Maria and Piperidis, Stelios and Ligeti-Nagy, No{\'e}mi and Jelencsik-M{\'a}tyus, Kinga and Varga, Zs{\'o}fia and Dod{\'e}, R{\'e}ka and Barkarson, Starkaður and Agnoloni, Tommaso and Bartolini, Roberto and Frontini, Francesca and Montemagni, Simonetta and Quochi, Valeria and Venturi, Giulia and Ruisi, Manuela and Marchetti, Carlo and Battistoni, Roberto and Darģis, Roberts and van Heusden, Ruben and Marx, Maarten and Tungland, Lars Magne and Rudolf, Micha{\l} and Nito{\'n}, Bart{\l}omiej and Aires, Jos{\'e} and Mendes, Am{\'a}lia and Cardoso, Aida and Pereira, Rui and Yrj{\"a}n{\"a}inen, V{\"a}in{\"o} and Nor{\'e}n, Fredrik Mohammadi and Magnusson, M{\aa}ns and Jarlbrink, Johan and Meden, Katja and Pan{\v c}ur, Andrej and Ojster{\v s}ek, Mihael and {\c C}{\"o}ltekin, {\c C}a{\u g}r{\i} and Kryvenko, Anna},
url = {http://hdl.handle.net/11356/1486},
note = {Slovenian language resource repository {CLARIN}.{SI}},
copyright = {Creative Commons - Attribution 4.0 International ({CC} {BY} 4.0)},
issn = {2820-4042},
year = {2023}
}
``` |
Zerenidel/couple | 2023-09-10T12:17:08.000Z | [
"region:us"
] | Zerenidel | null | null | null | 0 | 0 | Entry not found |
Photolens/DISC-Med-SFT-en-translated-only-CMeKG | 2023-10-02T19:08:39.000Z | [
"language:en",
"region:us"
] | Photolens | null | null | null | 1 | 0 | ---
dataset_info:
features:
- name: conversations
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 22582540
num_examples: 49920
download_size: 9097397
dataset_size: 22582540
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
language:
- en
---
# Dataset Card for "DISC-Med-SFT-en-translated-only-CMeKG"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
DynamicSuperb/MultiSpeakerDetection_LibriSpeech-TestClean | 2023-09-11T07:37:39.000Z | [
"region:us"
] | DynamicSuperb | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
dataset_info:
features:
- name: file
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: instruction
dtype: string
- name: label
dtype: string
- name: utterance 1
dtype: string
- name: utterance 2
dtype: string
- name: utterance 3
dtype: string
- name: utterance 4
dtype: string
- name: utterance 5
dtype: string
splits:
- name: test
num_bytes: 889343528.0
num_examples: 2000
download_size: 707786230
dataset_size: 889343528.0
---
# Dataset Card for "MultiSpeakerDetection_LibriSpeechTestClean"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
kphuang68/cs_zero_speech | 2023-09-10T13:59:28.000Z | [
"license:apache-2.0",
"region:us"
] | kphuang68 | null | null | null | 0 | 0 | ---
license: apache-2.0
---
|
samsonmax/verpelicula | 2023-09-10T14:22:25.000Z | [
"region:us"
] | samsonmax | null | null | null | 0 | 0 | Entry not found |
vikp/hydra_inst_labeled_bad | 2023-09-10T14:29:10.000Z | [
"region:us"
] | vikp | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: unique_conversation_id
dtype: string
- name: rendered
dtype: string
- name: dataset_id
dtype: string
- name: inst_prob
dtype: float64
splits:
- name: train
num_bytes: 90343785.37738979
num_examples: 47604
download_size: 32011958
dataset_size: 90343785.37738979
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "hydra_inst_labeled_bad"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
breadlicker45/Bread-chatbot-dataset-test | 2023-09-10T20:42:22.000Z | [
"task_categories:text-generation",
"size_categories:1M<n<10M",
"region:us"
] | breadlicker45 | null | null | null | 0 | 0 | ---
task_categories:
- text-generation
size_categories:
- 1M<n<10M
---
# Dataset Card for "Bread-chatbot-dataset-test"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
duyduong9htv/review | 2023-09-10T15:33:46.000Z | [
"region:us"
] | duyduong9htv | null | null | null | 0 | 0 | Entry not found |
bongo2112/mulokoziepk-dreambooth-dataset-v2 | 2023-09-10T16:56:47.000Z | [
"region:us"
] | bongo2112 | null | null | null | 0 | 0 | Entry not found |
Taegyuu/test1 | 2023-09-10T15:48:45.000Z | [
"license:unknown",
"region:us"
] | Taegyuu | null | null | null | 0 | 0 | ---
license: unknown
---
|
hzlama/household_chat | 2023-09-26T14:02:03.000Z | [
"size_categories:1K<n<10K",
"source_datasets:original",
"language:en",
"license:apache-2.0",
"region:us"
] | hzlama | null | null | null | 1 | 0 | ---
# Example metadata to be added to a dataset card.
# Full dataset card template at https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md
language:
- en
license: apache-2.0
pretty_name: household_img_chat
size_categories:
- 1K<n<10K
source_datasets:
- original
task_ids:
- img_vqa
# Optional. This part can be used to store the feature types and size of the dataset to be used in python. This can be automatically generated using the datasets-cli.
dataset_info:
features:
- name: id # Example: id
dtype: string # Example: int32
- name: image # Example: text
dtype: string # Example: string
- name: conversations # Example: text
dtype: string # Example: string
# Example for SQuAD:
# - name: id
# dtype: string
# - name: title
# dtype: string
# - name: context
# dtype: string
# - name: question
# dtype: string
# - name: answers
# sequence:
# - name: text
# dtype: string
# - name: answer_start
# dtype: int32
--- |
ClassicGuiwu/guiwu | 2023-09-10T16:45:01.000Z | [
"license:openrail",
"region:us"
] | ClassicGuiwu | null | null | null | 0 | 0 | ---
license: openrail
---
|
DynamicSuperb/MultiSpeakerDetection_VCTK | 2023-09-11T07:44:31.000Z | [
"region:us"
] | DynamicSuperb | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: test
path: data/test-*
dataset_info:
features:
- name: file
dtype: string
- name: audio
dtype:
audio:
sampling_rate: 16000
- name: instruction
dtype: string
- name: label
dtype: string
- name: utterance 1
dtype: string
- name: utterance 2
dtype: string
- name: utterance 3
dtype: string
- name: utterance 4
dtype: string
- name: utterance 5
dtype: string
splits:
- name: test
num_bytes: 407678216.0
num_examples: 2000
download_size: 380944308
dataset_size: 407678216.0
---
# Dataset Card for "MultiSpeakerDetection_VCTK"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
MelonThink/melonllama01 | 2023-09-11T05:47:46.000Z | [
"region:us"
] | MelonThink | null | null | null | 0 | 0 | Entry not found |
mrbrain404/my_datasets | 2023-09-10T17:04:44.000Z | [
"license:other",
"region:us"
] | mrbrain404 | null | null | null | 0 | 0 | ---
license: other
---
|
davidmart/geocode-addresses | 2023-09-10T17:01:02.000Z | [
"region:us"
] | davidmart | null | null | null | 0 | 0 | Entry not found |
filevich/fact2019 | 2023-09-10T17:50:52.000Z | [
"license:mit",
"region:us"
] | filevich | In this paper we present the second edition of the FACT shared task (Factuality Annotation and Classification
Task), included in IberLEF2020. The main objective of this task is to advance in the study of the factuality of
the events mentioned in texts. This year, the FACT task includes a subtask on event identification in addition
to the factuality classification subtask. We describe the submitted systems as well as the corpus used, which is
the same used in FACT 2019 but extended by adding annotations for nominal events. | @inproceedings{fact2020,
title = "Overview of FACT at IberLEF 2020: Events Detection and Classification",
author = "Rosa, Aiala and Chiruzzo, Luis and Wonsever, Dina and Malcuori, Marisa and Curell, Hortènsia and Castellón, Irene and Vázquez, Gloria and Fernández-Montraveta, Ana and Góngora, Santiago and Alonso, Laura",
booktitle = "Proceedings of the Seventh Conference on Natural Language Learning at {HLT}-{NAACL} 2003",
year = "2020",
url = "https://www.aclweb.org/anthology/W03-0419",
} | null | 0 | 0 | ---
license: mit
---
|
TFMUNIR/users-movies-ratings-28082023 | 2023-09-10T17:58:15.000Z | [
"region:us"
] | TFMUNIR | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: Película
dtype: string
- name: Año de la película
dtype: int64
- name: Texto 1
dtype: string
- name: Texto 2
dtype: string
- name: Texto 3
dtype: string
- name: Edad
dtype: int64
- name: Calificación
dtype: string
- name: Fecha
dtype: string
- name: Emoción texto 1
dtype: string
- name: Emoción texto 2
dtype: string
- name: Emoción texto 3
dtype: string
- name: Promedio emociones textos
dtype: string
- name: Suma promedio emociones textos
dtype: float64
- name: Emociones equilibradas
dtype: string
- name: Suma emociones equilibradas
dtype: float64
- name: Emociones películas
dtype: string
- name: Suma emociones películas
dtype: float64
- name: Score de recomendaciones
dtype: float64
- name: Emoción dominante textos
dtype: float64
- name: Emoción dominante equilibradas
dtype: float64
- name: Emoción dominante películas
dtype: float64
splits:
- name: train
num_bytes: 199996
num_examples: 188
download_size: 56295
dataset_size: 199996
---
# Dataset Card for "users-movies-qualifications-28082023"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
bongo2112/mulokoziepk-swappedFinal-v1 | 2023-09-10T18:56:14.000Z | [
"region:us"
] | bongo2112 | null | null | null | 0 | 0 | Entry not found |
fazemasta/lh | 2023-09-10T18:55:42.000Z | [
"region:us"
] | fazemasta | null | null | null | 0 | 0 | Entry not found |
tanooki426/AngieThompson | 2023-09-10T19:07:09.000Z | [
"license:openrail",
"region:us"
] | tanooki426 | null | null | null | 0 | 0 | ---
license: openrail
---
|
arcadiaskintagremover-us/arcadiaskintagremover | 2023-09-10T20:00:34.000Z | [
"license:afl-3.0",
"region:us"
] | arcadiaskintagremover-us | null | null | null | 0 | 0 | ---
license: afl-3.0
---
|
ayushoj/csv | 2023-09-10T19:35:12.000Z | [
"license:openrail",
"region:us"
] | ayushoj | null | null | null | 0 | 0 | ---
license: openrail
---
|
edbeeching/gia-dataset-tokenized-debug2 | 2023-09-10T19:44:03.000Z | [
"region:us"
] | edbeeching | null | null | null | 0 | 0 | ---
dataset_info:
config_name: atari-alien
features:
- name: input_types
sequence: int64
- name: input_ids
sequence: int32
- name: patches
sequence:
sequence:
sequence:
sequence: uint8
- name: loss_mask
sequence: bool
- name: patch_positions
sequence:
sequence:
sequence: float64
- name: local_positions
sequence: int64
- name: attention_mask
sequence: bool
splits:
- name: test
num_bytes: 442153668
num_examples: 335
download_size: 35972017
dataset_size: 442153668
configs:
- config_name: atari-alien
data_files:
- split: test
path: atari-alien/test-*
---
# Dataset Card for "gia-dataset-tokenized-debug2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
bongo2112/mulokoziepk-joyFace-v1 | 2023-09-11T00:31:14.000Z | [
"region:us"
] | bongo2112 | null | null | null | 0 | 0 | Entry not found |
argilla/squad_v2 | 2023-09-10T20:50:44.000Z | [
"size_categories:10K<n<100K",
"rlfh",
"argilla",
"human-feedback",
"region:us"
] | argilla | null | null | null | 0 | 0 | ---
size_categories: 10K<n<100K
tags:
- rlfh
- argilla
- human-feedback
---
# Dataset Card for squad_v2
This dataset has been created with [Argilla](https://docs.argilla.io).
As shown in the sections below, this dataset can be loaded into Argilla as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets).
## Dataset Description
- **Homepage:** https://argilla.io
- **Repository:** https://github.com/argilla-io/argilla
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
This dataset contains:
* A dataset configuration file conforming to the Argilla dataset format named `argilla.yaml`. This configuration file will be used to configure the dataset when using the `FeedbackDataset.from_huggingface` method in Argilla.
* Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `FeedbackDataset.from_huggingface` and can be loaded independently using the `datasets` library via `load_dataset`.
* The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla.
### Load with Argilla
To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code:
```python
import argilla as rg
ds = rg.FeedbackDataset.from_huggingface("argilla/squad_v2")
```
### Load with `datasets`
To load this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code:
```python
from datasets import load_dataset
ds = load_dataset("argilla/squad_v2")
```
### Supported Tasks and Leaderboards
This dataset can contain [multiple fields, questions and responses](https://docs.argilla.io/en/latest/guides/llms/conceptual_guides/data_model.html) so it can be used for different NLP tasks, depending on the configuration. The dataset structure is described in the [Dataset Structure section](#dataset-structure).
There are no leaderboards associated with this dataset.
### Languages
[More Information Needed]
## Dataset Structure
### Data in Argilla
The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, and **guidelines**.
The **fields** are the dataset records themselves, for the moment just text fields are suppported. These are the ones that will be used to provide responses to the questions.
| Field Name | Title | Type | Required | Markdown |
| ---------- | ----- | ---- | -------- | -------- |
| question | Question | TextField | True | False |
| context | Context | TextField | True | False |
The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, single choice, or multiple choice.
| Question Name | Title | Type | Required | Description | Values/Labels |
| ------------- | ----- | ---- | -------- | ----------- | ------------- |
| answer | Answer | TextQuestion | True | N/A | N/A |
**✨ NEW** Additionally, we also have **suggestions**, which are linked to the existing questions, and so on, named appending "-suggestion" and "-suggestion-metadata" to those, containing the value/s of the suggestion and its metadata, respectively. So on, the possible values are the same as in the table above.
Finally, the **guidelines** are just a plain string that can be used to provide instructions to the annotators. Find those in the [annotation guidelines](#annotation-guidelines) section.
### Data Instances
An example of a dataset instance in Argilla looks as follows:
```json
{
"fields": {
"context": "Beyonc\u00e9 Giselle Knowles-Carter (/bi\u02d0\u02c8j\u0252nse\u026a/ bee-YON-say) (born September 4, 1981) is an American singer, songwriter, record producer and actress. Born and raised in Houston, Texas, she performed in various singing and dancing competitions as a child, and rose to fame in the late 1990s as lead singer of R\u0026B girl-group Destiny\u0027s Child. Managed by her father, Mathew Knowles, the group became one of the world\u0027s best-selling girl groups of all time. Their hiatus saw the release of Beyonc\u00e9\u0027s debut album, Dangerously in Love (2003), which established her as a solo artist worldwide, earned five Grammy Awards and featured the Billboard Hot 100 number-one singles \"Crazy in Love\" and \"Baby Boy\".",
"question": "When did Beyonce start becoming popular?"
},
"metadata": {
"split": "train"
},
"responses": [
{
"status": "submitted",
"values": {
"answer": {
"value": "in the late 1990s"
}
}
}
],
"suggestions": []
}
```
While the same record in HuggingFace `datasets` looks as follows:
```json
{
"answer": [
{
"status": "submitted",
"user_id": null,
"value": "in the late 1990s"
}
],
"answer-suggestion": null,
"answer-suggestion-metadata": {
"agent": null,
"score": null,
"type": null
},
"context": "Beyonc\u00e9 Giselle Knowles-Carter (/bi\u02d0\u02c8j\u0252nse\u026a/ bee-YON-say) (born September 4, 1981) is an American singer, songwriter, record producer and actress. Born and raised in Houston, Texas, she performed in various singing and dancing competitions as a child, and rose to fame in the late 1990s as lead singer of R\u0026B girl-group Destiny\u0027s Child. Managed by her father, Mathew Knowles, the group became one of the world\u0027s best-selling girl groups of all time. Their hiatus saw the release of Beyonc\u00e9\u0027s debut album, Dangerously in Love (2003), which established her as a solo artist worldwide, earned five Grammy Awards and featured the Billboard Hot 100 number-one singles \"Crazy in Love\" and \"Baby Boy\".",
"external_id": null,
"metadata": "{\"split\": \"train\"}",
"question": "When did Beyonce start becoming popular?"
}
```
### Data Fields
Among the dataset fields, we differentiate between the following:
* **Fields:** These are the dataset records themselves, for the moment just text fields are suppported. These are the ones that will be used to provide responses to the questions.
* **question** is of type `TextField`.
* **context** is of type `TextField`.
* **Questions:** These are the questions that will be asked to the annotators. They can be of different types, such as `RatingQuestion`, `TextQuestion`, `LabelQuestion`, `MultiLabelQuestion`, and `RankingQuestion`.
* **answer** is of type `TextQuestion`.
* **✨ NEW** **Suggestions:** As of Argilla 1.13.0, the suggestions have been included to provide the annotators with suggestions to ease or assist during the annotation process. Suggestions are linked to the existing questions, are always optional, and contain not just the suggestion itself, but also the metadata linked to it, if applicable.
* (optional) **answer-suggestion** is of type `text`.
Additionally, we also have one more field which is optional and is the following:
* **external_id:** This is an optional field that can be used to provide an external ID for the dataset record. This can be useful if you want to link the dataset record to an external resource, such as a database or a file.
### Data Splits
The dataset contains a single split, which is `train`.
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation guidelines
[More Information Needed]
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
nt3awnou/review_requests | 2023-09-14T15:42:46.000Z | [
"region:us"
] | nt3awnou | null | null | null | 0 | 0 | ---
pretty_name: review requests
---
This dataset contains the submissions for reviewing requests in the [**Nt3awnou Map**](https://huggingface.co/spaces/nt3awnou/Nt3awnou-rescue-map). Each request is a single file containing the id and review reason given by the user. |
edbeeching/gia-dataset-tokenized-debug3 | 2023-09-11T06:41:08.000Z | [
"region:us"
] | edbeeching | null | null | null | 0 | 0 | ---
dataset_info:
- config_name: atari-alien
features:
- name: patch_positions
sequence:
sequence:
sequence: float64
- name: loss_mask
sequence: bool
- name: patches
sequence:
sequence:
sequence:
sequence: uint8
- name: input_ids
sequence: int32
- name: local_positions
sequence: int64
- name: input_types
sequence: int64
- name: attention_mask
sequence: bool
splits:
- name: test
num_bytes: 529255424
num_examples: 400
- name: train
num_bytes: 381345596
num_examples: 289
download_size: 73985248
dataset_size: 910601020
- config_name: atari-breakout
features:
- name: patch_positions
sequence:
sequence:
sequence: float64
- name: loss_mask
sequence: bool
- name: patches
sequence:
sequence:
sequence:
sequence: uint8
- name: input_ids
sequence: int32
- name: local_positions
sequence: int64
- name: input_types
sequence: int64
- name: attention_mask
sequence: bool
splits:
- name: test
num_bytes: 834912548
num_examples: 631
- name: train
num_bytes: 472269128
num_examples: 358
download_size: 41674307
dataset_size: 1307181676
- config_name: mujoco-ant
features:
- name: input_types
sequence: int64
- name: loss_mask
sequence: bool
- name: input_ids
sequence: int32
- name: local_positions
sequence: int64
- name: attention_mask
sequence: bool
splits:
- name: test
num_bytes: 2059688
num_examples: 98
- name: train
num_bytes: 2837132
num_examples: 135
download_size: 320515
dataset_size: 4896820
configs:
- config_name: atari-alien
data_files:
- split: test
path: atari-alien/test-*
- split: train
path: atari-alien/train-*
- config_name: atari-breakout
data_files:
- split: test
path: atari-breakout/test-*
- split: train
path: atari-breakout/train-*
- config_name: mujoco-ant
data_files:
- split: test
path: mujoco-ant/test-*
- split: train
path: mujoco-ant/train-*
---
# Dataset Card for "gia-dataset-tokenized-debug3"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ontocord/minipile_labse_embeddings | 2023-09-10T21:35:43.000Z | [
"region:us"
] | ontocord | null | null | null | 0 | 0 | Entry not found |
hynky/czech-justice-summ-alpaca-short | 2023-09-10T21:25:17.000Z | [
"region:us"
] | hynky | null | null | null | 0 | 0 | ---
dataset_info:
features:
- name: output
dtype: string
- name: instruction
dtype: string
splits:
- name: train
num_bytes: 4856971
num_examples: 2015
download_size: 2389930
dataset_size: 4856971
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "czech-justice-summ-alpaca-short"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ituhgeura/onlinemegaespanol | 2023-09-10T21:46:23.000Z | [
"region:us"
] | ituhgeura | null | null | null | 0 | 0 | Entry not found |
sinanisler/StableDiffusion | 2023-09-10T22:07:39.000Z | [
"region:us"
] | sinanisler | null | null | null | 1 | 0 | # Stable Diffusion
Stable Diffusion is an exciting new AI system that is changing the world of generative art and media creation. Developed by the smart folks at Anthropic, Stable Diffusion uses an innovative deep learning technique called diffusion models to generate high-quality, photorealistic images and art from simple text descriptions.
Now, previous AI image generators were cool, but they didn't quite capture the nuanced details and coherence of human-created art. Stable Diffusion blows those out of the water. It can take a text prompt and generate all kinds of diverse, intricate images that look like they came right out of an artist's imagination.
What makes [Stable Diffusion](https://stable-diffusion.app/ "Stable Diffusion") so game-changing? A few key features:
- The images look incredibly realistic and detailed, unlike anything AI has been able to produce before. We're talking artwork that could fool your eye into thinking a human painted it.
- Users have fine-grained control over the image generation process. You can guide Stable Diffusion with text and example images to iterate and edit quickly.
- This thing is versatile, capable of producing stunning illustrations, concept art, paintings - you name it. The possibilities are endless.
- Best of all, it's free and open source. This means a thriving community of creative programmers and artists are already building tools and apps on top of Stable Diffusion, pushing what's possible with AI art.
With continuous improvements, it's clear that Stable Diffusion represents a leap forward for AI and generative art. It gives us a glimpse into the future, where text-to-image systems empower unlimited creativity and change how we illustrate, design and distribute visual media. I don't know about you, but I think that future is looking pretty bright!
|
Freakscode/Animals | 2023-09-10T22:07:40.000Z | [
"license:other",
"region:us"
] | Freakscode | null | null | null | 0 | 0 | ---
license: other
---
|
jamesliu23/simpsons | 2023-09-11T02:38:39.000Z | [
"region:us"
] | jamesliu23 | null | null | null | 0 | 0 | Entry not found |
arcadiaskintagremover-us/ArcadiaSkinTagRemoverReviews | 2023-09-10T22:23:01.000Z | [
"license:openrail",
"region:us"
] | arcadiaskintagremover-us | null | null | null | 0 | 0 | ---
license: openrail
---
|
crystal-technologies/CircumSpect | 2023-09-12T21:18:22.000Z | [
"region:us"
] | crystal-technologies | null | null | null | 0 | 0 | Entry not found |
davidggphy/gtzan_all_preprocessed | 2023-09-10T23:06:42.000Z | [
"region:us"
] | davidggphy | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: label
dtype:
class_label:
names:
'0': blues
'1': classical
'2': country
'3': disco
'4': hiphop
'5': jazz
'6': metal
'7': pop
'8': reggae
'9': rock
- name: input_values
sequence: float32
- name: attention_mask
sequence: int32
splits:
- name: train
num_bytes: 3452159816
num_examples: 899
- name: test
num_bytes: 384000696
num_examples: 100
download_size: 1923103923
dataset_size: 3836160512
---
# Dataset Card for "gtzan_all_preprocessed"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Taehun81/testDataset | 2023-09-11T00:26:56.000Z | [
"license:cc-by-4.0",
"region:us"
] | Taehun81 | null | null | null | 0 | 0 | ---
license: cc-by-4.0
---
|
amitrajitbh1/communities_unproc | 2023-09-11T00:32:26.000Z | [
"region:us"
] | amitrajitbh1 | null | null | null | 0 | 0 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: author
dtype: string
- name: body
dtype: string
- name: normalizedBody
dtype: string
- name: subreddit
dtype: string
- name: subreddit_id
dtype: string
- name: id
dtype: string
- name: content
dtype: string
- name: summary
dtype: string
splits:
- name: train
num_bytes: 2450236670.538612
num_examples: 497952
download_size: 1497430442
dataset_size: 2450236670.538612
---
# Dataset Card for "communities_unproc"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
amitness/logits-mt-en-512 | 2023-09-11T11:59:13.000Z | [
"region:us"
] | amitness | null | null | null | 0 | 0 | Entry not found |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.