datasetId | card |
|---|---|
sasha/prof_images_blip__stabilityai-stable-diffusion-2 | ---
dataset_info:
features:
- name: images
dtype: image
- name: embeddings
sequence: float32
splits:
- name: courier
num_bytes: 4257204.0
num_examples: 100
- name: aide
num_bytes: 3491678.0
num_examples: 100
- name: police_officer
num_bytes: 4252502.0
num_examples: 100
- name: purchasing_agent
num_bytes: 5285492.0
num_examples: 100
- name: metal_worker
num_bytes: 5097159.0
num_examples: 100
- name: financial_analyst
num_bytes: 5261660.0
num_examples: 100
- name: stocker
num_bytes: 5700830.0
num_examples: 100
- name: it_specialist
num_bytes: 5731031.0
num_examples: 100
- name: writer
num_bytes: 4309299.0
num_examples: 100
- name: accountant
num_bytes: 4689435.0
num_examples: 100
- name: coach
num_bytes: 4537870.0
num_examples: 100
- name: painter
num_bytes: 3686294.0
num_examples: 100
- name: real_estate_broker
num_bytes: 5241610.0
num_examples: 100
- name: truck_driver
num_bytes: 4955237.0
num_examples: 100
- name: data_entry_keyer
num_bytes: 6112746.0
num_examples: 100
- name: computer_support_specialist
num_bytes: 5187781.0
num_examples: 100
- name: cook
num_bytes: 3905039.0
num_examples: 100
- name: interior_designer
num_bytes: 4970044.0
num_examples: 100
- name: nutritionist
num_bytes: 4904269.0
num_examples: 100
- name: designer
num_bytes: 5618928.0
num_examples: 100
- name: maid
num_bytes: 3653112.0
num_examples: 100
- name: producer
num_bytes: 5532083.0
num_examples: 100
- name: executive_assistant
num_bytes: 5181631.0
num_examples: 100
- name: logistician
num_bytes: 5623819.0
num_examples: 100
- name: tractor_operator
num_bytes: 5566239.0
num_examples: 100
- name: doctor
num_bytes: 3922429.0
num_examples: 100
- name: inventory_clerk
num_bytes: 5675119.0
num_examples: 100
- name: sheet_metal_worker
num_bytes: 4932393.0
num_examples: 100
- name: groundskeeper
num_bytes: 5624913.0
num_examples: 100
- name: electrical_engineer
num_bytes: 5486843.0
num_examples: 100
- name: physical_therapist
num_bytes: 4416383.0
num_examples: 100
- name: insurance_agent
num_bytes: 4503029.0
num_examples: 100
- name: aerospace_engineer
num_bytes: 5005814.0
num_examples: 100
- name: psychologist
num_bytes: 4751138.0
num_examples: 100
- name: financial_advisor
num_bytes: 4616805.0
num_examples: 100
- name: printing_press_operator
num_bytes: 4885677.0
num_examples: 100
- name: architect
num_bytes: 4694972.0
num_examples: 100
- name: dental_hygienist
num_bytes: 4051984.0
num_examples: 100
- name: artist
num_bytes: 4093686.0
num_examples: 100
- name: office_worker
num_bytes: 4984173.0
num_examples: 100
- name: ceo
num_bytes: 4753603.0
num_examples: 100
- name: taxi_driver
num_bytes: 4839205.0
num_examples: 100
- name: librarian
num_bytes: 5209270.0
num_examples: 100
- name: author
num_bytes: 4326443.0
num_examples: 100
- name: plumber
num_bytes: 5004142.0
num_examples: 100
- name: construction_worker
num_bytes: 5173177.0
num_examples: 100
- name: clergy
num_bytes: 3852512.0
num_examples: 100
- name: electrician
num_bytes: 5239521.0
num_examples: 100
- name: jailer
num_bytes: 5032189.0
num_examples: 100
- name: credit_counselor
num_bytes: 4814481.0
num_examples: 100
- name: scientist
num_bytes: 4363783.0
num_examples: 100
- name: drywall_installer
num_bytes: 4174819.0
num_examples: 100
- name: school_bus_driver
num_bytes: 4998022.0
num_examples: 100
- name: dental_assistant
num_bytes: 4140296.0
num_examples: 100
- name: fitness_instructor
num_bytes: 4416504.0
num_examples: 100
- name: detective
num_bytes: 4583678.0
num_examples: 100
- name: hairdresser
num_bytes: 4463307.0
num_examples: 100
- name: welder
num_bytes: 4918374.0
num_examples: 100
- name: pharmacy_technician
num_bytes: 4661790.0
num_examples: 100
- name: compliance_officer
num_bytes: 4845349.0
num_examples: 100
- name: singer
num_bytes: 4638247.0
num_examples: 100
- name: tutor
num_bytes: 3678185.0
num_examples: 100
- name: language_pathologist
num_bytes: 5254361.0
num_examples: 100
- name: medical_records_specialist
num_bytes: 5634506.0
num_examples: 100
- name: sales_manager
num_bytes: 5056132.0
num_examples: 100
- name: industrial_engineer
num_bytes: 5172786.0
num_examples: 100
- name: manager
num_bytes: 4931846.0
num_examples: 100
- name: mechanic
num_bytes: 4613784.0
num_examples: 100
- name: postal_worker
num_bytes: 4441299.0
num_examples: 100
- name: computer_systems_analyst
num_bytes: 6079263.0
num_examples: 100
- name: salesperson
num_bytes: 5024667.0
num_examples: 100
- name: office_clerk
num_bytes: 4755914.0
num_examples: 100
- name: claims_appraiser
num_bytes: 4478395.0
num_examples: 100
- name: security_guard
num_bytes: 5230558.0
num_examples: 100
- name: interviewer
num_bytes: 4656965.0
num_examples: 100
- name: dispatcher
num_bytes: 5291584.0
num_examples: 100
- name: lawyer
num_bytes: 4277664.0
num_examples: 100
- name: marketing_manager
num_bytes: 5252505.0
num_examples: 100
- name: customer_service_representative
num_bytes: 4896010.0
num_examples: 100
- name: software_developer
num_bytes: 5547030.0
num_examples: 100
- name: mover
num_bytes: 4568118.0
num_examples: 100
- name: supervisor
num_bytes: 4923186.0
num_examples: 100
- name: paralegal
num_bytes: 4687033.0
num_examples: 100
- name: graphic_designer
num_bytes: 5759315.0
num_examples: 100
- name: dentist
num_bytes: 3970331.0
num_examples: 100
- name: roofer
num_bytes: 5495073.0
num_examples: 100
- name: public_relations_specialist
num_bytes: 5073442.0
num_examples: 100
- name: engineer
num_bytes: 4250666.0
num_examples: 100
- name: occupational_therapist
num_bytes: 3855692.0
num_examples: 100
- name: manicurist
num_bytes: 4064118.0
num_examples: 100
- name: cleaner
num_bytes: 5095971.0
num_examples: 100
- name: facilities_manager
num_bytes: 5247879.0
num_examples: 100
- name: repair_worker
num_bytes: 5113432.0
num_examples: 100
- name: cashier
num_bytes: 5215203.0
num_examples: 100
- name: baker
num_bytes: 4610706.0
num_examples: 100
- name: market_research_analyst
num_bytes: 5593811.0
num_examples: 100
- name: health_technician
num_bytes: 4477836.0
num_examples: 100
- name: veterinarian
num_bytes: 4197438.0
num_examples: 100
- name: underwriter
num_bytes: 4662113.0
num_examples: 100
- name: mechanical_engineer
num_bytes: 5722417.0
num_examples: 100
- name: janitor
num_bytes: 5390408.0
num_examples: 100
- name: pilot
num_bytes: 3829685.0
num_examples: 100
- name: therapist
num_bytes: 3859109.0
num_examples: 100
- name: director
num_bytes: 4096085.0
num_examples: 100
- name: wholesale_buyer
num_bytes: 6300854.0
num_examples: 100
- name: air_conditioning_installer
num_bytes: 5741602.0
num_examples: 100
- name: butcher
num_bytes: 5332426.0
num_examples: 100
- name: machinery_mechanic
num_bytes: 5057875.0
num_examples: 100
- name: event_planner
num_bytes: 5039870.0
num_examples: 100
- name: carpet_installer
num_bytes: 5903738.0
num_examples: 100
- name: musician
num_bytes: 4431405.0
num_examples: 100
- name: civil_engineer
num_bytes: 5463535.0
num_examples: 100
- name: farmer
num_bytes: 4772114.0
num_examples: 100
- name: financial_manager
num_bytes: 5187071.0
num_examples: 100
- name: childcare_worker
num_bytes: 4326077.0
num_examples: 100
- name: clerk
num_bytes: 4268877.0
num_examples: 100
- name: machinist
num_bytes: 4593820.0
num_examples: 100
- name: firefighter
num_bytes: 4414229.0
num_examples: 100
- name: photographer
num_bytes: 4056745.0
num_examples: 100
- name: file_clerk
num_bytes: 5925494.0
num_examples: 100
- name: bus_driver
num_bytes: 4714778.0
num_examples: 100
- name: fast_food_worker
num_bytes: 4778503.0
num_examples: 100
- name: bartender
num_bytes: 5407515.0
num_examples: 100
- name: computer_programmer
num_bytes: 5961791.0
num_examples: 100
- name: pharmacist
num_bytes: 5103862.0
num_examples: 100
- name: nursing_assistant
num_bytes: 3969985.0
num_examples: 100
- name: career_counselor
num_bytes: 4578703.0
num_examples: 100
- name: mental_health_counselor
num_bytes: 4915411.0
num_examples: 100
- name: network_administrator
num_bytes: 6576986.0
num_examples: 100
- name: teacher
num_bytes: 4112341.0
num_examples: 100
- name: dishwasher
num_bytes: 4582691.0
num_examples: 100
- name: teller
num_bytes: 4580341.0
num_examples: 100
- name: teaching_assistant
num_bytes: 4310141.0
num_examples: 100
- name: payroll_clerk
num_bytes: 5104922.0
num_examples: 100
- name: laboratory_technician
num_bytes: 4772940.0
num_examples: 100
- name: social_assistant
num_bytes: 4975461.0
num_examples: 100
- name: radiologic_technician
num_bytes: 4614401.0
num_examples: 100
- name: social_worker
num_bytes: 4143912.0
num_examples: 100
- name: nurse
num_bytes: 3251197.0
num_examples: 100
- name: receptionist
num_bytes: 4962877.0
num_examples: 100
- name: carpenter
num_bytes: 4367834.0
num_examples: 100
- name: correctional_officer
num_bytes: 5027428.0
num_examples: 100
- name: community_manager
num_bytes: 5327391.0
num_examples: 100
- name: massage_therapist
num_bytes: 4309573.0
num_examples: 100
- name: head_cook
num_bytes: 4488723.0
num_examples: 100
- name: plane_mechanic
num_bytes: 4650683.0
num_examples: 100
download_size: 729196101
dataset_size: 705285705.0
---
# Dataset Card for "prof_images_blip__stabilityai-stable-diffusion-2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_JosephusCheung__Guanaco | ---
pretty_name: Evaluation run of JosephusCheung/Guanaco
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [JosephusCheung/Guanaco](https://huggingface.co/JosephusCheung/Guanaco) on the\
\ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 3 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_JosephusCheung__Guanaco\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-09-23T06:44:02.813633](https://huggingface.co/datasets/open-llm-leaderboard/details_JosephusCheung__Guanaco/blob/main/results_2023-09-23T06-44-02.813633.json) (note
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.23343120805369127,\n\
\ \"em_stderr\": 0.004332062137833453,\n \"f1\": 0.2960843120805377,\n\
\ \"f1_stderr\": 0.004351433413685765,\n \"acc\": 0.34333070244672453,\n\
\ \"acc_stderr\": 0.006518256048373988\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.23343120805369127,\n \"em_stderr\": 0.004332062137833453,\n\
\ \"f1\": 0.2960843120805377,\n \"f1_stderr\": 0.004351433413685765\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\"\
: 0.0\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6866614048934491,\n\
\ \"acc_stderr\": 0.013036512096747976\n }\n}\n```"
repo_url: https://huggingface.co/JosephusCheung/Guanaco
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_drop_3
data_files:
- split: 2023_09_23T06_44_02.813633
path:
- '**/details_harness|drop|3_2023-09-23T06-44-02.813633.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-09-23T06-44-02.813633.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_09_23T06_44_02.813633
path:
- '**/details_harness|gsm8k|5_2023-09-23T06-44-02.813633.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-09-23T06-44-02.813633.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_09_23T06_44_02.813633
path:
- '**/details_harness|winogrande|5_2023-09-23T06-44-02.813633.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-09-23T06-44-02.813633.parquet'
- config_name: results
data_files:
- split: 2023_09_23T06_44_02.813633
path:
- results_2023-09-23T06-44-02.813633.parquet
- split: latest
path:
- results_2023-09-23T06-44-02.813633.parquet
---
# Dataset Card for Evaluation run of JosephusCheung/Guanaco
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/JosephusCheung/Guanaco
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [JosephusCheung/Guanaco](https://huggingface.co/JosephusCheung/Guanaco) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_JosephusCheung__Guanaco",
"harness_winogrande_5",
split="train")
```
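The aggregated results can be loaded in the same way; a minimal sketch, using the `results` configuration and the `latest` split listed in this card's configs:
```python
from datasets import load_dataset

# Aggregated results of the latest run, stored in the "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_JosephusCheung__Guanaco",
    "results",
    split="latest",
)
```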
## Latest results
These are the [latest results from run 2023-09-23T06:44:02.813633](https://huggingface.co/datasets/open-llm-leaderboard/details_JosephusCheung__Guanaco/blob/main/results_2023-09-23T06-44-02.813633.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.23343120805369127,
"em_stderr": 0.004332062137833453,
"f1": 0.2960843120805377,
"f1_stderr": 0.004351433413685765,
"acc": 0.34333070244672453,
"acc_stderr": 0.006518256048373988
},
"harness|drop|3": {
"em": 0.23343120805369127,
"em_stderr": 0.004332062137833453,
"f1": 0.2960843120805377,
"f1_stderr": 0.004351433413685765
},
"harness|gsm8k|5": {
"acc": 0.0,
"acc_stderr": 0.0
},
"harness|winogrande|5": {
"acc": 0.6866614048934491,
"acc_stderr": 0.013036512096747976
}
}
```
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
brianarimura/Tirre | ---
license: openrail
---
|
pesc101/spyder-ide-lbl-all | ---
dataset_info:
features:
- name: meta_data
struct:
- name: contains_class
dtype: bool
- name: contains_function
dtype: bool
- name: end_line
dtype: int64
- name: file_imports
sequence: string
- name: file_name
dtype: string
- name: module
dtype: string
- name: start_line
dtype: int64
- name: code
dtype: string
- name: question
dtype: string
- name: answer
dtype: string
- name: prompt
dtype: string
splits:
- name: train
num_bytes: 29948743
num_examples: 7859
download_size: 8407280
dataset_size: 29948743
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
ziwenyd/transcoder-geeksforgeeks | ---
license: mit
---
# statistics
cpp-java: 627 pairs
python-java: 616 pairs
cpp-python: 545 pairs
|
arbml/Commonsense_Validation | ---
dataset_info:
features:
- name: id
dtype: string
- name: first_sentence
dtype: string
- name: second_sentence
dtype: string
- name: label
dtype:
class_label:
names:
0: 0
1: 1
splits:
- name: train
num_bytes: 1420233
num_examples: 10000
- name: validation
num_bytes: 133986
num_examples: 1000
download_size: 837486
dataset_size: 1554219
---
# Dataset Card for "Commonsense_Validation"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
EleutherAI/quirky_capitals_alice_hard | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: bob_label
dtype: bool
- name: alice_label
dtype: bool
- name: difficulty
dtype: float64
- name: statement
dtype: string
- name: choices
sequence: string
- name: character
dtype: string
- name: label
dtype: bool
splits:
- name: train
num_bytes: 14121.790811339199
num_examples: 128
- name: validation
num_bytes: 31658.112
num_examples: 288
- name: test
num_bytes: 30507.672
num_examples: 277
download_size: 34388
dataset_size: 76287.5748113392
---
# Dataset Card for "quirky_capitals_alice_hard"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
popaqy/europarl-bg-en | ---
dataset_info:
features:
- name: bg
dtype: string
- name: en
dtype: string
- name: sentence_len
dtype: int64
- name: clear
dtype: bool
splits:
- name: train
num_bytes: 178319272
num_examples: 408290
download_size: 83310937
dataset_size: 178319272
---
# Dataset Card for "europarl-bg-en"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
alabulei/gaianet-test | ---
license: apache-2.0
---
|
causal-lm/instruction_alphaca | ---
language: en
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 17091630
num_examples: 46801
- name: validation
num_bytes: 1908482
num_examples: 5201
download_size: 12071362
dataset_size: 19000112
---
# Dataset Card for "instruction_alphaca"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Romanes/lineas.U | ---
license: cc-by-nc-nd-4.0
---
|
killah-t-cell/movie_stills_captioned_dataset | ---
dataset_info:
features:
- name: image
dtype: 'null'
- name: caption
dtype: 'null'
splits:
- name: train
download_size: 0
dataset_size: 0
---
# Dataset Card for "movie_stills_captioned_dataset"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
AdapterOcean/Open_Platypus_standardized_cluster_10_alpaca | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 3175598
num_examples: 2612
download_size: 1539980
dataset_size: 3175598
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "Open_Platypus_standardized_cluster_10_alpaca"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
PaulLoisel/wcr_base | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
- split: val
path: data/val-*
dataset_info:
features:
- name: Age
dtype: int64
- name: Title
dtype: string
- name: Review Text
dtype: string
- name: Rating
dtype: int64
- name: label
dtype: int64
- name: Positive Feedback Count
dtype: int64
- name: Division Name
dtype: string
- name: Department Name
dtype: string
- name: Class Name
dtype: string
splits:
- name: train
num_bytes: 145123.3
num_examples: 350
- name: test
num_bytes: 31097.85
num_examples: 75
- name: val
num_bytes: 31097.85
num_examples: 75
download_size: 129979
dataset_size: 207319.0
---
# Dataset Card for "wcr_base"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
allenai/multinews_sparse_mean | ---
annotations_creators:
- expert-generated
language_creators:
- expert-generated
language:
- en
license:
- other
multilinguality:
- monolingual
pretty_name: Multi-News
size_categories:
- 10K<n<100K
source_datasets:
- original
task_categories:
- summarization
task_ids:
- news-articles-summarization
paperswithcode_id: multi-news
train-eval-index:
- config: default
task: summarization
task_id: summarization
splits:
train_split: train
eval_split: test
col_mapping:
document: text
summary: target
metrics:
- type: rouge
name: Rouge
---
This is a copy of the [Multi-News](https://huggingface.co/datasets/multi_news) dataset, except the input source documents of its `test` split have been replaced by a __sparse__ retriever. The retrieval pipeline used:
- __query__: The `summary` field of each example
- __corpus__: The union of all documents in the `train`, `validation` and `test` splits
- __retriever__: BM25 via [PyTerrier](https://pyterrier.readthedocs.io/en/latest/) with default settings
- __top-k strategy__: `"mean"`, i.e. the number of documents retrieved, `k`, is set as the mean number of documents seen across examples in this dataset, in this case `k==3`
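Given the pipeline above, a minimal sketch of loading the retrieval-augmented `test` split with the `datasets` library (the `document` and `summary` field names follow the `col_mapping` in this card's metadata):
```python
from datasets import load_dataset

# Test split whose source documents were replaced by the top-k (k == 3) BM25-retrieved documents.
dataset = load_dataset("allenai/multinews_sparse_mean", split="test")

example = dataset[0]
print(example["document"][:300])  # retrieved source documents
print(example["summary"][:300])   # target summary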
Retrieval results on the `train` set:
| Recall@100 | Rprec | Precision@k | Recall@k |
| ----------- | ----------- | ----------- | ----------- |
| 0.8793 | 0.7460 | 0.6403 | 0.7417 |
Retrieval results on the `validation` set:
| Recall@100 | Rprec | Precision@k | Recall@k |
| ----------- | ----------- | ----------- | ----------- |
| 0.8748 | 0.7453 | 0.6361 | 0.7442 |
Retrieval results on the `test` set:
| Recall@100 | Rprec | Precision@k | Recall@k |
| ----------- | ----------- | ----------- | ----------- |
| 0.8775 | 0.7480 | 0.6370 | 0.7443 | |
wilsonslz/PABLO | ---
license: openrail
---
|
TearGosling/ARC_OWARIDA_concept | ---
license: cc-by-sa-4.0
---
|
autoevaluate/autoeval-staging-eval-project-cnn_dailymail-8f63e3f3-11895594 | ---
type: predictions
tags:
- autotrain
- evaluation
datasets:
- cnn_dailymail
eval_info:
task: summarization
model: philschmid/distilbart-cnn-12-6-samsum
metrics: []
dataset_name: cnn_dailymail
dataset_config: 3.0.0
dataset_split: test
col_mapping:
text: article
target: highlights
---
# Dataset Card for AutoTrain Evaluator
This repository contains model predictions generated by [AutoTrain](https://huggingface.co/autotrain) for the following task and dataset:
* Task: Summarization
* Model: philschmid/distilbart-cnn-12-6-samsum
* Dataset: cnn_dailymail
* Config: 3.0.0
* Split: test
To run new evaluation jobs, visit Hugging Face's [automatic model evaluator](https://huggingface.co/spaces/autoevaluate/model-evaluator).
## Contributions
Thanks to [@nbroad](https://huggingface.co/nbroad) for evaluating this model. |
jtatman/CoT_reformatted | ---
dataset_info:
features:
- name: instruction
dtype: string
- name: input
dtype: string
- name: output
dtype: string
- name: id
dtype: float64
splits:
- name: train
num_bytes: 2487281447
num_examples: 3229975
download_size: 1513934252
dataset_size: 2487281447
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
license: apache-2.0
task_categories:
- text-generation
- question-answering
language:
- en
- zh
tags:
- cot
- conversational
pretty_name: cot reformatted
size_categories:
- 1M<n<10M
---
# Dataset Card for "CoT_reformatted"
This dataset is reformatted from: QingyiSi/Alpaca-CoT
All credit goes there. Thanks to QingyiSi for the work in consolidating many diverse sources for comparison and cross-file analysis.
There were some issues loading files from that dataset for a testing project.
I extracted the following data files for this subset:
- alpaca_data_cleaned
- CoT_data
- firefly
- instruct
- alpaca_gpt4_data
- dolly
- GPTeacher
- thoughtsource
- finance_en
- instinwild_en
|
AdapterOcean/med_alpaca_standardized_cluster_29_std | ---
dataset_info:
features:
- name: message
dtype: string
- name: message_type
dtype: string
- name: message_id
dtype: int64
- name: conversation_id
dtype: int64
- name: cluster
dtype: float64
- name: __index_level_0__
dtype: int64
splits:
- name: train
num_bytes: 21052572
num_examples: 44806
download_size: 10662713
dataset_size: 21052572
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "med_alpaca_standardized_cluster_29_std"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
gokuls/glue_augmented_cola | ---
license: apache-2.0
---
# Dataset Card for glue_augmented_cola
## Dataset Description
Augmented COLA dataset
**Reference:** https://huggingface.co/datasets/glue |
sinonimayzer/mixed-data | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 17829001968.66333
num_examples: 10391742
- name: validation
num_bytes: 1981001934.4298058
num_examples: 1154639
download_size: 12707112125
dataset_size: 19810003903.093136
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
ineoApp/dataset-02-16 | ---
dataset_info:
features:
- name: id
dtype: string
- name: image
dtype: image
- name: bboxes
sequence:
sequence: int64
- name: ner_tags
sequence:
class_label:
names:
'0': O
'1': numero facture
'2': Telephone
'3': Email
'4': Site web
'5': RC
'6': CNSS
'7': TP
'8': Mode de paiement
'9': fournisseur
'10': date facture
'11': date limite
'12': montant ht
'13': montant ttc
'14': tva
'15': prix tva
'16': addresse
'17': reference
'18': Devise
'19': ICE fournisseur
'20': IF fournisseur
'21': Condition de paiement
'22': informations
'23': art1 designation
'24': art1 quantite
'25': art1 prix unit
'26': art1 tva
'27': art1 montant ht
'28': art1 Article
'29': art1 taux de remise
'30': art2 designation
'31': art2 quantite
'32': art2 prix unit
'33': art2 tva
'34': art2 montant ht
'35': art2 Article
'36': art2 taux de remise
'37': art3 designation
'38': art3 quantite
'39': art3 prix unit
'40': art3 tva
'41': art3 montant ht
'42': art3 Article
'43': art3 taux de remise
'44': art4 designation
'45': art4 quantite
'46': art4 prix unit
'47': art4 tva
'48': art4 montant ht
'49': art4 Article
'50': art4 taux de remise
'51': art5 designation
'52': art5 quantite
'53': art5 prix unit
'54': art5 tva
'55': art5 montant ht
'56': art5 Article
'57': art5 taux de remise
'58': art6 designation
'59': art6 quantite
'60': art6 prix unit
'61': art6 tva
'62': art6 montant ht
'63': art6 Article
'64': art6 taux de remise
'65': art7 designation
'66': art7 quantite
'67': art7 prix unit
'68': art7 tva
'69': art7 montant ht
'70': art7 Article
'71': art7 taux de remise
'72': art8 designation
'73': art8 quantite
'74': art8 prix unit
'75': art8 tva
'76': art8 montant ht
'77': art8 Article
'78': art8 taux de remise
'79': art9 designation
'80': art9 quantite
'81': art9 prix unit
'82': art9 tva
'83': art9 montant ht
'84': art9 Article
'85': art9 taux de remise
'86': art10 designation
'87': art10 quantite
'88': art10 prix unit
'89': art10 tva
'90': art10 montant ht
'91': art10 Article
'92': art10 taux de remise
'93': art11 designation
'94': art11 quantite
'95': art11 prix unit
'96': art11 tva
'97': art11 montant ht
'98': art11 Article
'99': art11 taux de remise
'100': art12 designation
'101': art12 quantite
'102': art12 prix unit
'103': art12 tva
'104': art12 montant ht
'105': art12 Article
'106': art12 taux de remise
'107': art13 designation
'108': art13 quantite
'109': art13 prix unit
'110': art13 tva
'111': art13 montant ht
'112': art13 Article
'113': art13 taux de remise
'114': art14 designation
'115': art14 quantite
'116': art14 prix unit
'117': art14 tva
'118': art14 montant ht
'119': art14 Article
'120': art14 taux de remise
- name: tokens
sequence: string
splits:
- name: train
num_bytes: 362851126.49572647
num_examples: 280
- name: test
num_bytes: 92008678.5042735
num_examples: 71
download_size: 435978677
dataset_size: 454859805.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
liuyanchen1015/MULTI_VALUE_cola_relativizer_doubling | ---
dataset_info:
features:
- name: sentence
dtype: string
- name: label
dtype: int64
- name: idx
dtype: int64
- name: value_score
dtype: int64
splits:
- name: dev
num_bytes: 4481
num_examples: 48
- name: test
num_bytes: 3173
num_examples: 35
- name: train
num_bytes: 44408
num_examples: 457
download_size: 30123
dataset_size: 52062
---
# Dataset Card for "MULTI_VALUE_cola_relativizer_doubling"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
CyberHarem/click_arknights | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of click/カシャ/卡达 (Arknights)
This is the dataset of click/カシャ/卡达 (Arknights), containing 59 images and their tags.
The core tags of this character are `animal_ears, short_hair, mouse_ears, grey_hair, mouse_girl, black_headwear, hat, baseball_cap, blue_eyes, ears_through_headwear, tail, mouse_tail, hair_between_eyes`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan, ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-----------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 59 | 83.36 MiB | [Download](https://huggingface.co/datasets/CyberHarem/click_arknights/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 1200 | 59 | 70.10 MiB | [Download](https://huggingface.co/datasets/CyberHarem/click_arknights/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 140 | 146.40 MiB | [Download](https://huggingface.co/datasets/CyberHarem/click_arknights/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/click_arknights',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering results; some outfits may be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 8 |  |  |  |  |  | 1girl, black_jacket, long_sleeves, looking_at_viewer, solo, official_alternate_costume, open_jacket, smile, upper_body, backpack, hand_on_headwear, simple_background, yellow_shirt, closed_mouth, black_gloves, fingerless_gloves, white_background |
| 1 | 13 |  |  |  |  |  | 1girl, black_jacket, long_sleeves, open_jacket, solo, black_skirt, yellow_shirt, simple_background, looking_at_viewer, smile, backpack, black_gloves, holding, fingerless_gloves, grey_pantyhose, white_background, full_body, staff, open_mouth, one_eye_closed, sneakers, white_footwear |
| 2 | 9 |  |  |  |  |  | 1girl, eyepatch, long_sleeves, looking_at_viewer, official_alternate_costume, solo, white_shirt, black_jacket, red_eyes, smile, crop_top, midriff, navel, top_hat, black_bowtie, belt, black_skirt, cowboy_shot, halloween, open_mouth, simple_background, white_background |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | black_jacket | long_sleeves | looking_at_viewer | solo | official_alternate_costume | open_jacket | smile | upper_body | backpack | hand_on_headwear | simple_background | yellow_shirt | closed_mouth | black_gloves | fingerless_gloves | white_background | black_skirt | holding | grey_pantyhose | full_body | staff | open_mouth | one_eye_closed | sneakers | white_footwear | eyepatch | white_shirt | red_eyes | crop_top | midriff | navel | top_hat | black_bowtie | belt | cowboy_shot | halloween |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:---------------|:---------------|:--------------------|:-------|:-----------------------------|:--------------|:--------|:-------------|:-----------|:-------------------|:--------------------|:---------------|:---------------|:---------------|:--------------------|:-------------------|:--------------|:----------|:-----------------|:------------|:--------|:-------------|:-----------------|:-----------|:-----------------|:-----------|:--------------|:-----------|:-----------|:----------|:--------|:----------|:---------------|:-------|:--------------|:------------|
| 0 | 8 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | | | | | | | | | | |
| 1 | 13 |  |  |  |  |  | X | X | X | X | X | | X | X | | X | | X | X | | X | X | X | X | X | X | X | X | X | X | X | X | | | | | | | | | | | |
| 2 | 9 |  |  |  |  |  | X | X | X | X | X | X | | X | | | | X | | | | | X | X | | | | | X | | | | X | X | X | X | X | X | X | X | X | X | X |
|
jan-hq/bagel_dpo_binarized | ---
dataset_info:
features:
- name: chosen
list:
- name: content
dtype: string
- name: role
dtype: string
- name: rejected
list:
- name: content
dtype: string
- name: role
dtype: string
splits:
- name: train
num_bytes: 214413112.84474632
num_examples: 56435
- name: test
num_bytes: 11287700.155253679
num_examples: 2971
download_size: 127148178
dataset_size: 225700813.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
WorkWithData/Stolen_Artworks | ---
license: cc-by-4.0
---
This dataset contains artworks classified by Interpol as stolen. The dataset can also be found and filtered differently on: https://www.workwithdata.com/dataset?entity=artworks&f=1&fcol0=museum&fop0=%3D&fval0=Stolen+art+%28Interpol%29
Similar datasets can also be found on: https://www.workwithdata.com |
jkeisling/hacker-news-corpus-2007-2022 | ---
license: mit
language:
- en
pretty_name: Hacker News posts and comments, 2007-Nov 2022
size_categories:
- 10M<n<100M
---
# Hacker News corpus, 2007-Nov 2022
## Dataset Description
### Dataset Summary
**Dataset Name:** Hacker News Full Corpus (2007 - November 2022)
**Description:**
> NOTE: I am not affiliated with Y Combinator.
This dataset is a July 2023 snapshot of YCombinator's [BigQuery dump](https://console.cloud.google.com/marketplace/details/y-combinator/hacker-news) of the entire archive of posts and comments made on Hacker News. It contains posts from Hacker News' inception in 2007 through to November 16, 2022, when the BigQuery database was last updated.
The dataset does not incorporate any modifications or filtering - it is a raw dump from the original dataset provided by YCombinator. Hence, it retains the same structure and fields as the original BigQuery table, serving as a ready-to-use resource for conducting large-scale data analysis or training language models.
All credit for the original data collection and maintenance goes to YCombinator and the original post and comment authors. This version of the dataset has been prepared for convenience and ease of use within the HuggingFace ecosystem, especially for those interested in offline usage or who prefer not to use Google Cloud.
Please bear in mind that this dataset is a snapshot and will probably not be updated. For the latest data, consider accessing the live data directly from the official [Hacker News API](https://github.com/HackerNews/API), potentially using [Anant Narayanan's scripts](https://www.kix.in/2023/05/05/hacker-news-chatgpt-plugin/#downloading-the-dataset).
Please use responsibly, respecting all relevant terms of use and privacy considerations inherent in the data.
### Languages
English
## Dataset Structure
### Data Fields
| fullname | mode | type | description |
| ----------- | -------- | --------- | ------------------------------------------------------------ |
| title | NULLABLE | STRING | Story title |
| url | NULLABLE | STRING | Story url |
| text | NULLABLE | STRING | Story or comment text |
| dead | NULLABLE | BOOLEAN | Is dead? |
| by | NULLABLE | STRING | The username of the item's author. |
| score | NULLABLE | INTEGER | Story score |
| time | NULLABLE | INTEGER | Unix time |
| timestamp | NULLABLE | TIMESTAMP | Timestamp for the unix time |
| type | NULLABLE | STRING | Type of details (comment, comment_ranking, poll, story, job, pollopt) |
| id | NULLABLE | INTEGER | The item's unique id. |
| parent | NULLABLE | INTEGER | Parent comment ID |
| descendants | NULLABLE | INTEGER | Number of story or poll descendants |
| ranking | NULLABLE | INTEGER | Comment ranking |
| deleted | NULLABLE | BOOLEAN | Is deleted? |
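For convenience, a minimal sketch of reading the corpus with the `datasets` library; the field names come from the table above, while the `train` split name and streaming access are assumptions not stated in this card:
```python
from datasets import load_dataset

# Stream the dump to avoid downloading tens of millions of rows at once (split name "train" is assumed).
ds = load_dataset("jkeisling/hacker-news-corpus-2007-2022", split="train", streaming=True)

# Keep only live, non-deleted stories and peek at the first one.
stories = (row for row in ds if row["type"] == "story" and not row["dead"] and not row["deleted"])
first = next(stories)
print(first["title"], first["score"], first["by"])
```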
## Dataset Creation
### Curation Rationale
This dataset provides a snapshot of the Hacker News posts and comments archive, sourced from YCombinator's open data, to enable easy and direct access without the need for a Google Cloud account or BigQuery interface, and without putting undue strain on the HN API. It aims to simplify the data acquisition process, promoting its use within the HuggingFace ecosystem for various tasks including analysis, trend prediction, sentiment studies, and language model training. By minimizing barriers to access, this dataset encourages a wider usage, fostering innovation in natural language processing and related fields.
### Annotations
### Personal and Sensitive Information
This dataset has not undergone specific checks for personally identifiable information (PII); hence, it's possible that some may exist within the data. However, as the data source is publicly available and shared by YCombinator, any potential PII present is already part of the public domain.
## Considerations for Using the Data
### Social Impact of Dataset
The collective wisdom and perspectives captured in the posts and comments of this Hacker News dataset represent a unique gift from YCombinator and countless contributors worldwide; it is part of the common heritage of humanity. The potential insights to be gleaned and the future knowledge to be generated, especially through the training of language models on this corpus, can provide unbounded new perspectives, enriching our understanding and potential solutions to complex issues. It is a testament to the power of shared knowledge and open dialogue in shaping the world. While there is a risk that some may use language models trained on this dataset for disinformation purposes, it's worth noting that the misuse of technology is a challenge that predates this dataset. The proverbial horse of misused technology has long since left the barn; our focus now must be on harnessing this shared intellectual legacy responsibly for the common good.
### Discussion of Biases
Given that Hacker News is a technology-focused platform with a largely self-selected user base, the content and perspectives found within this dataset may lean towards technology, entrepreneurship, and related fields, often reflecting the views and biases of this specific community. As such, users should be aware that analysis drawn from this data may not fully represent a balanced, global perspective and might contain inherent biases towards topics and viewpoints that are overrepresented in the Hacker News community.
## Additional Information
### Licensing Information
In the absence of an explicit license for the upstream BigQuery dataset, this dataset uses the same MIT license as the Hacker News API. The upstream terms of use are reproduced here:
> This dataset is publicly available for anyone to use under the following terms provided by the Dataset Source - [https://github.com/HackerNews/API ](https://github.com/HackerNews/API) - and is provided "AS IS" without any warranty, express or implied, from Google. Google disclaims all liability for any damages, direct or indirect, resulting from the use of the dataset. |
yzhuang/autotree_nnxor_l1_26 | ---
dataset_info:
features:
- name: id
dtype: int64
- name: input_x
sequence:
sequence: float32
- name: input_y
sequence:
sequence: float32
- name: rtg
sequence:
sequence: float64
- name: status
sequence:
sequence: float32
- name: split_threshold
sequence:
sequence: float64
- name: split_dimension
sequence: int64
splits:
- name: train
num_bytes: 8001200000
num_examples: 100000
- name: validation
num_bytes: 800120000
num_examples: 10000
- name: test
num_bytes: 800120000
num_examples: 10000
download_size: 7974442465
dataset_size: 9601440000
---
# Dataset Card for "autotree_nnxor_l1_26"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Circularmachines/batch_indexing_machine_230529_005 | ---
dataset_info:
features:
- name: image
dtype: image
splits:
- name: train
num_bytes: 158262680.0
num_examples: 720
download_size: 158274633
dataset_size: 158262680.0
---
# Dataset Card for "batch_indexing_machine_230529_005"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
lsb/poetaexmachina-recitations-milli-d6vergil | ---
dataset_info:
features:
- name: audio
dtype: audio
- name: sentence
dtype: string
splits:
- name: train
num_bytes: 2850254.0
num_examples: 17
download_size: 2612867
dataset_size: 2850254.0
---
# Dataset Card for "poetaexmachina-recitations-milli-d6vergil"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_ammarali32__MultiVerse_LASER | ---
pretty_name: Evaluation run of ammarali32/MultiVerse_LASER
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [ammarali32/MultiVerse_LASER](https://huggingface.co/ammarali32/MultiVerse_LASER)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
\ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ammarali32__MultiVerse_LASER\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-13T17:36:07.539880](https://huggingface.co/datasets/open-llm-leaderboard/details_ammarali32__MultiVerse_LASER/blob/main/results_2024-03-13T17-36-07.539880.json) (note
\ that there might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6514265356884416,\n\
\ \"acc_stderr\": 0.032078933959357805,\n \"acc_norm\": 0.6506378382024072,\n\
\ \"acc_norm_stderr\": 0.03275314934353573,\n \"mc1\": 0.6364749082007344,\n\
\ \"mc1_stderr\": 0.016838862883965834,\n \"mc2\": 0.7769793072107058,\n\
\ \"mc2_stderr\": 0.013698220900182868\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.7056313993174061,\n \"acc_stderr\": 0.013318528460539419,\n\
\ \"acc_norm\": 0.7252559726962458,\n \"acc_norm_stderr\": 0.013044617212771227\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7126070503883688,\n\
\ \"acc_stderr\": 0.004516215206715358,\n \"acc_norm\": 0.888070105556662,\n\
\ \"acc_norm_stderr\": 0.0031463583832603585\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \
\ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6518518518518519,\n\
\ \"acc_stderr\": 0.041153246103369526,\n \"acc_norm\": 0.6518518518518519,\n\
\ \"acc_norm_stderr\": 0.041153246103369526\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n\
\ \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n\
\ \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \
\ \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933714,\n\
\ \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933714\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n\
\ \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n\
\ \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \
\ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"\
acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\
\ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n\
\ \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n\
\ \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.04655010411319617,\n\
\ \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.04655010411319617\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n\
\ \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108102,\n\
\ \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108102\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\
\ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.40476190476190477,\n \"acc_stderr\": 0.025279850397404907,\n \"\
acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.025279850397404907\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n\
\ \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n\
\ \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \
\ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n\
\ \"acc_stderr\": 0.023025899617188716,\n \"acc_norm\": 0.7935483870967742,\n\
\ \"acc_norm_stderr\": 0.023025899617188716\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n\
\ \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\"\
: 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\
\ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.797979797979798,\n \"acc_stderr\": 0.02860620428922987,\n \"acc_norm\"\
: 0.797979797979798,\n \"acc_norm_stderr\": 0.02860620428922987\n },\n\
\ \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \
\ \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603346,\n\
\ \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603346\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.658974358974359,\n \"acc_stderr\": 0.024035489676335082,\n \
\ \"acc_norm\": 0.658974358974359,\n \"acc_norm_stderr\": 0.024035489676335082\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \
\ \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \
\ \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"\
acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8532110091743119,\n \"acc_stderr\": 0.015173141845126243,\n \"\
acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.015173141845126243\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5740740740740741,\n \"acc_stderr\": 0.033723432716530624,\n \"\
acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.033723432716530624\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455334,\n \"\
acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455334\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \
\ \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n\
\ \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n\
\ \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.8091603053435115,\n \"acc_stderr\": 0.03446513350752598,\n\
\ \"acc_norm\": 0.8091603053435115,\n \"acc_norm_stderr\": 0.03446513350752598\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070416,\n \"\
acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070416\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n\
\ \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n\
\ \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n\
\ \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\
\ \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n\
\ \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\
\ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n\
\ \"acc_stderr\": 0.021586494001281376,\n \"acc_norm\": 0.8760683760683761,\n\
\ \"acc_norm_stderr\": 0.021586494001281376\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8237547892720306,\n\
\ \"acc_stderr\": 0.013625556907993466,\n \"acc_norm\": 0.8237547892720306,\n\
\ \"acc_norm_stderr\": 0.013625556907993466\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7312138728323699,\n \"acc_stderr\": 0.023868003262500107,\n\
\ \"acc_norm\": 0.7312138728323699,\n \"acc_norm_stderr\": 0.023868003262500107\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.42681564245810055,\n\
\ \"acc_stderr\": 0.016542401954631917,\n \"acc_norm\": 0.42681564245810055,\n\
\ \"acc_norm_stderr\": 0.016542401954631917\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n\
\ \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n\
\ \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n\
\ \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135114,\n\
\ \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135114\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.475177304964539,\n \"acc_stderr\": 0.02979071924382972,\n \
\ \"acc_norm\": 0.475177304964539,\n \"acc_norm_stderr\": 0.02979071924382972\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n\
\ \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n\
\ \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.02841820861940676,\n\
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.02841820861940676\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \
\ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\
\ \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n\
\ \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n\
\ \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n\
\ \"acc_stderr\": 0.026193923544454115,\n \"acc_norm\": 0.835820895522388,\n\
\ \"acc_norm_stderr\": 0.026193923544454115\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \
\ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n\
\ \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n\
\ \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n\
\ \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.6364749082007344,\n\
\ \"mc1_stderr\": 0.016838862883965834,\n \"mc2\": 0.7769793072107058,\n\
\ \"mc2_stderr\": 0.013698220900182868\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8492501973164956,\n \"acc_stderr\": 0.010056094631479674\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6952236542835482,\n \
\ \"acc_stderr\": 0.012679297549515437\n }\n}\n```"
repo_url: https://huggingface.co/ammarali32/MultiVerse_LASER
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|arc:challenge|25_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|gsm8k|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hellaswag|10_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-13T17-36-07.539880.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-13T17-36-07.539880.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- '**/details_harness|winogrande|5_2024-03-13T17-36-07.539880.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-13T17-36-07.539880.parquet'
- config_name: results
data_files:
- split: 2024_03_13T17_36_07.539880
path:
- results_2024-03-13T17-36-07.539880.parquet
- split: latest
path:
- results_2024-03-13T17-36-07.539880.parquet
---
# Dataset Card for Evaluation run of ammarali32/MultiVerse_LASER
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [ammarali32/MultiVerse_LASER](https://huggingface.co/ammarali32/MultiVerse_LASER) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_ammarali32__MultiVerse_LASER",
"harness_winogrande_5",
split="train")
```
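The aggregated metrics live in the `results` configuration. As a minimal sketch (not part of the auto-generated card, but based on the configs declared above), it can be loaded with the `latest` split:
```python
from datasets import load_dataset

# Sketch based on the configs above: the "results" configuration holds the
# aggregated metrics, and the "latest" split points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_ammarali32__MultiVerse_LASER",
    "results",
    split="latest",
)
print(results[0])  # one row containing the aggregated results of the latest run
```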
## Latest results
These are the [latest results from run 2024-03-13T17:36:07.539880](https://huggingface.co/datasets/open-llm-leaderboard/details_ammarali32__MultiVerse_LASER/blob/main/results_2024-03-13T17-36-07.539880.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and in the "latest" split for each eval):
```python
{
"all": {
"acc": 0.6514265356884416,
"acc_stderr": 0.032078933959357805,
"acc_norm": 0.6506378382024072,
"acc_norm_stderr": 0.03275314934353573,
"mc1": 0.6364749082007344,
"mc1_stderr": 0.016838862883965834,
"mc2": 0.7769793072107058,
"mc2_stderr": 0.013698220900182868
},
"harness|arc:challenge|25": {
"acc": 0.7056313993174061,
"acc_stderr": 0.013318528460539419,
"acc_norm": 0.7252559726962458,
"acc_norm_stderr": 0.013044617212771227
},
"harness|hellaswag|10": {
"acc": 0.7126070503883688,
"acc_stderr": 0.004516215206715358,
"acc_norm": 0.888070105556662,
"acc_norm_stderr": 0.0031463583832603585
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.32,
"acc_stderr": 0.04688261722621504,
"acc_norm": 0.32,
"acc_norm_stderr": 0.04688261722621504
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6518518518518519,
"acc_stderr": 0.041153246103369526,
"acc_norm": 0.6518518518518519,
"acc_norm_stderr": 0.041153246103369526
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.64,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.64,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7094339622641509,
"acc_stderr": 0.02794321998933714,
"acc_norm": 0.7094339622641509,
"acc_norm_stderr": 0.02794321998933714
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.03586879280080341,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.03586879280080341
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.55,
"acc_stderr": 0.05,
"acc_norm": 0.55,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6473988439306358,
"acc_stderr": 0.036430371689585475,
"acc_norm": 0.6473988439306358,
"acc_norm_stderr": 0.036430371689585475
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3235294117647059,
"acc_stderr": 0.04655010411319617,
"acc_norm": 0.3235294117647059,
"acc_norm_stderr": 0.04655010411319617
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5617021276595745,
"acc_stderr": 0.03243618636108102,
"acc_norm": 0.5617021276595745,
"acc_norm_stderr": 0.03243618636108102
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5448275862068965,
"acc_stderr": 0.04149886942192117,
"acc_norm": 0.5448275862068965,
"acc_norm_stderr": 0.04149886942192117
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.40476190476190477,
"acc_stderr": 0.025279850397404907,
"acc_norm": 0.40476190476190477,
"acc_norm_stderr": 0.025279850397404907
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.31,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.31,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7935483870967742,
"acc_stderr": 0.023025899617188716,
"acc_norm": 0.7935483870967742,
"acc_norm_stderr": 0.023025899617188716
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5123152709359606,
"acc_stderr": 0.035169204442208966,
"acc_norm": 0.5123152709359606,
"acc_norm_stderr": 0.035169204442208966
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.797979797979798,
"acc_stderr": 0.02860620428922987,
"acc_norm": 0.797979797979798,
"acc_norm_stderr": 0.02860620428922987
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.02150024957603346,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.02150024957603346
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.658974358974359,
"acc_stderr": 0.024035489676335082,
"acc_norm": 0.658974358974359,
"acc_norm_stderr": 0.024035489676335082
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.32222222222222224,
"acc_stderr": 0.028493465091028593,
"acc_norm": 0.32222222222222224,
"acc_norm_stderr": 0.028493465091028593
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.680672268907563,
"acc_stderr": 0.030283995525884396,
"acc_norm": 0.680672268907563,
"acc_norm_stderr": 0.030283995525884396
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.36423841059602646,
"acc_stderr": 0.03929111781242742,
"acc_norm": 0.36423841059602646,
"acc_norm_stderr": 0.03929111781242742
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8532110091743119,
"acc_stderr": 0.015173141845126243,
"acc_norm": 0.8532110091743119,
"acc_norm_stderr": 0.015173141845126243
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5740740740740741,
"acc_stderr": 0.033723432716530624,
"acc_norm": 0.5740740740740741,
"acc_norm_stderr": 0.033723432716530624
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8431372549019608,
"acc_stderr": 0.02552472232455334,
"acc_norm": 0.8431372549019608,
"acc_norm_stderr": 0.02552472232455334
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.810126582278481,
"acc_stderr": 0.02553010046023349,
"acc_norm": 0.810126582278481,
"acc_norm_stderr": 0.02553010046023349
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6771300448430493,
"acc_stderr": 0.031381476375754995,
"acc_norm": 0.6771300448430493,
"acc_norm_stderr": 0.031381476375754995
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.8091603053435115,
"acc_stderr": 0.03446513350752598,
"acc_norm": 0.8091603053435115,
"acc_norm_stderr": 0.03446513350752598
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7603305785123967,
"acc_stderr": 0.03896878985070416,
"acc_norm": 0.7603305785123967,
"acc_norm_stderr": 0.03896878985070416
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7962962962962963,
"acc_stderr": 0.03893542518824847,
"acc_norm": 0.7962962962962963,
"acc_norm_stderr": 0.03893542518824847
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7730061349693251,
"acc_stderr": 0.03291099578615769,
"acc_norm": 0.7730061349693251,
"acc_norm_stderr": 0.03291099578615769
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|hendrycksTest-management|5": {
"acc": 0.7766990291262136,
"acc_stderr": 0.04123553189891431,
"acc_norm": 0.7766990291262136,
"acc_norm_stderr": 0.04123553189891431
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8760683760683761,
"acc_stderr": 0.021586494001281376,
"acc_norm": 0.8760683760683761,
"acc_norm_stderr": 0.021586494001281376
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8237547892720306,
"acc_stderr": 0.013625556907993466,
"acc_norm": 0.8237547892720306,
"acc_norm_stderr": 0.013625556907993466
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7312138728323699,
"acc_stderr": 0.023868003262500107,
"acc_norm": 0.7312138728323699,
"acc_norm_stderr": 0.023868003262500107
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.42681564245810055,
"acc_stderr": 0.016542401954631917,
"acc_norm": 0.42681564245810055,
"acc_norm_stderr": 0.016542401954631917
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7222222222222222,
"acc_stderr": 0.025646863097137897,
"acc_norm": 0.7222222222222222,
"acc_norm_stderr": 0.025646863097137897
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.707395498392283,
"acc_stderr": 0.02583989833487798,
"acc_norm": 0.707395498392283,
"acc_norm_stderr": 0.02583989833487798
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7376543209876543,
"acc_stderr": 0.024477222856135114,
"acc_norm": 0.7376543209876543,
"acc_norm_stderr": 0.024477222856135114
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.475177304964539,
"acc_stderr": 0.02979071924382972,
"acc_norm": 0.475177304964539,
"acc_norm_stderr": 0.02979071924382972
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4706649282920469,
"acc_stderr": 0.012748238397365549,
"acc_norm": 0.4706649282920469,
"acc_norm_stderr": 0.012748238397365549
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.02841820861940676,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.02841820861940676
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6764705882352942,
"acc_stderr": 0.018926082916083383,
"acc_norm": 0.6764705882352942,
"acc_norm_stderr": 0.018926082916083383
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274648,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274648
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.835820895522388,
"acc_stderr": 0.026193923544454115,
"acc_norm": 0.835820895522388,
"acc_norm_stderr": 0.026193923544454115
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.84,
"acc_stderr": 0.03684529491774709,
"acc_norm": 0.84,
"acc_norm_stderr": 0.03684529491774709
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699122
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8304093567251462,
"acc_stderr": 0.02878210810540171,
"acc_norm": 0.8304093567251462,
"acc_norm_stderr": 0.02878210810540171
},
"harness|truthfulqa:mc|0": {
"mc1": 0.6364749082007344,
"mc1_stderr": 0.016838862883965834,
"mc2": 0.7769793072107058,
"mc2_stderr": 0.013698220900182868
},
"harness|winogrande|5": {
"acc": 0.8492501973164956,
"acc_stderr": 0.010056094631479674
},
"harness|gsm8k|5": {
"acc": 0.6952236542835482,
"acc_stderr": 0.012679297549515437
}
}
```
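For illustration only, here is a small sketch that averages the per-task MMLU ("hendrycksTest") accuracies. It uses a two-entry excerpt of the dictionary shown above; in practice the full dict from the results JSON would be used in its place:
```python
# Illustrative sketch: excerpt of the results dict shown above. Only entries
# whose key starts with "harness|hendrycksTest-" contribute to the average.
latest_results = {
    "harness|hendrycksTest-abstract_algebra|5": {"acc": 0.32},
    "harness|hendrycksTest-anatomy|5": {"acc": 0.6518518518518519},
    "harness|truthfulqa:mc|0": {"mc1": 0.6364749082007344},
}

mmlu_accs = [
    scores["acc"]
    for task, scores in latest_results.items()
    if task.startswith("harness|hendrycksTest-")
]
print(f"Average accuracy over the hendrycksTest tasks: {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```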
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
dmayhem93/self-critiquing-refine-continuations | ---
dataset_info:
features:
- name: id
dtype: string
- name: source_id
dtype: string
- name: split
dtype: string
- name: time
dtype: float64
- name: labeler
dtype: string
- name: is_topic_based_summarization
dtype: bool
- name: category
dtype: string
- name: severity
dtype: int64
- name: text_quotes
list:
- name: begin
dtype: int64
- name: end
dtype: int64
- name: response_quotes
list:
- name: begin
dtype: int64
- name: end
dtype: int64
- name: prompt
dtype: string
- name: response
dtype: string
splits:
- name: test
num_bytes: 26105991
num_examples: 5119
download_size: 5089186
dataset_size: 26105991
---
# Dataset Card for "self-critiquing-refine-continuations"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
AdapterOcean/med_alpaca_standardized_cluster_96_alpaca | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 18857563
num_examples: 13847
download_size: 9295048
dataset_size: 18857563
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "med_alpaca_standardized_cluster_96_alpaca"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Cubpaw/voxelgym_5c_new_critic_42x42_50000 | ---
dataset_info:
features:
- name: image
dtype: image
- name: astar_path
dtype: image
- name: pred_path
sequence:
sequence: float32
splits:
- name: train
num_bytes: 302414480.0
num_examples: 40000
- name: validation
num_bytes: 75600070.0
num_examples: 10000
download_size: 38192777
dataset_size: 378014550.0
---
# Dataset Card for "voxelgym_5c_new_critic_42x42_50000"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Gabrielkdc/endpoints-userstories | ---
license: apache-2.0
task_categories:
- text-classification
language:
- en
---
This dataset contains a list of endpoints with their parameters, plus a column holding the user story for each endpoint. |
Longhui98/Graph_LLM | ---
license: apache-2.0
---
|
mask-distilled-onesec-cv12-each-chunk-uniq/chunk_254 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 737392888.0
num_examples: 144814
download_size: 745901507
dataset_size: 737392888.0
---
# Dataset Card for "chunk_254"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
JinglesDados/Wantuir | ---
license: openrail
---
|
metaeval/spartqa-yn | ---
license: apache-2.0
---
```
@inproceedings{mirzaee-etal-2021-spartqa,
title = "{SPARTQA}: A Textual Question Answering Benchmark for Spatial Reasoning",
author = "Mirzaee, Roshanak and
Rajaby Faghihi, Hossein and
Ning, Qiang and
Kordjamshidi, Parisa",
booktitle = "Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies",
month = jun,
year = "2021",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.naacl-main.364",
doi = "10.18653/v1/2021.naacl-main.364",
pages = "4582--4598",
}
``` |
liuyanchen1015/MULTI_VALUE_sst2_regularized_plurals | ---
dataset_info:
features:
- name: sentence
dtype: string
- name: label
dtype: int64
- name: idx
dtype: int64
- name: score
dtype: int64
splits:
- name: dev
num_bytes: 8198
num_examples: 54
- name: test
num_bytes: 15713
num_examples: 95
- name: train
num_bytes: 240193
num_examples: 1929
download_size: 134637
dataset_size: 264104
---
# Dataset Card for "MULTI_VALUE_sst2_regularized_plurals"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Birchlabs/openai-prm800k-phase2_test-solutions-only | ---
license: mit
---
|
looper525/mini-platypus | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
- name: instruction
dtype: string
- name: data_source
dtype: string
splits:
- name: train
num_bytes: 30776452
num_examples: 24926
download_size: 15552844
dataset_size: 30776452
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
77asadian/td_lte_ds | ---
license: mit
---
|
shidowake/cosmopedia-japanese-subset_from_aixsatoshi_filtered-sharegpt-format-with-system-prompt_split_3 | ---
dataset_info:
features:
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
splits:
- name: train
num_bytes: 3990625.4590984974
num_examples: 499
download_size: 2411761
dataset_size: 3990625.4590984974
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
open-llm-leaderboard/details_vicgalle__solarized-13B-dpo | ---
pretty_name: Evaluation run of vicgalle/solarized-13B-dpo
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [vicgalle/solarized-13B-dpo](https://huggingface.co/vicgalle/solarized-13B-dpo)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_vicgalle__solarized-13B-dpo\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-01-21T04:38:15.337905](https://huggingface.co/datasets/open-llm-leaderboard/details_vicgalle__solarized-13B-dpo/blob/main/results_2024-01-21T04-38-15.337905.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5894114296811551,\n\
\ \"acc_stderr\": 0.033439242720178564,\n \"acc_norm\": 0.595668575251732,\n\
\ \"acc_norm_stderr\": 0.034144224072911684,\n \"mc1\": 0.5079559363525091,\n\
\ \"mc1_stderr\": 0.017501285074551825,\n \"mc2\": 0.6624959612962921,\n\
\ \"mc2_stderr\": 0.01569484808694598\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6023890784982935,\n \"acc_stderr\": 0.014301752223279536,\n\
\ \"acc_norm\": 0.6271331058020477,\n \"acc_norm_stderr\": 0.014131176760131163\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6286596295558654,\n\
\ \"acc_stderr\": 0.004821757734156713,\n \"acc_norm\": 0.8181637124078869,\n\
\ \"acc_norm_stderr\": 0.0038492126228151687\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5481481481481482,\n\
\ \"acc_stderr\": 0.042992689054808644,\n \"acc_norm\": 0.5481481481481482,\n\
\ \"acc_norm_stderr\": 0.042992689054808644\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n\
\ \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.67,\n\
\ \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \
\ \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6339622641509434,\n \"acc_stderr\": 0.02964781353936525,\n\
\ \"acc_norm\": 0.6339622641509434,\n \"acc_norm_stderr\": 0.02964781353936525\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6180555555555556,\n\
\ \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.6180555555555556,\n\
\ \"acc_norm_stderr\": 0.040629907841466674\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\"\
: 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\"\
: {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \
\ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n \
\ },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\"\
: 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n\
\ \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\"\
: {\n \"acc\": 0.5895953757225434,\n \"acc_stderr\": 0.03750757044895537,\n\
\ \"acc_norm\": 0.5895953757225434,\n \"acc_norm_stderr\": 0.03750757044895537\n\
\ },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3137254901960784,\n\
\ \"acc_stderr\": 0.04617034827006717,\n \"acc_norm\": 0.3137254901960784,\n\
\ \"acc_norm_stderr\": 0.04617034827006717\n },\n \"harness|hendrycksTest-computer_security|5\"\
: {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384739,\n \
\ \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384739\n \
\ },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5234042553191489,\n\
\ \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.5234042553191489,\n\
\ \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\"\
: {\n \"acc\": 0.43859649122807015,\n \"acc_stderr\": 0.04668000738510455,\n\
\ \"acc_norm\": 0.43859649122807015,\n \"acc_norm_stderr\": 0.04668000738510455\n\
\ },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\"\
: 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"\
acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3994708994708995,\n \"acc_stderr\": 0.025225450284067877,\n \"\
acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.025225450284067877\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\
\ \"acc_stderr\": 0.043758884927270605,\n \"acc_norm\": 0.3968253968253968,\n\
\ \"acc_norm_stderr\": 0.043758884927270605\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \
\ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.6612903225806451,\n \"acc_stderr\": 0.02692344605930284,\n \"\
acc_norm\": 0.6612903225806451,\n \"acc_norm_stderr\": 0.02692344605930284\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"\
acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\"\
: 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n\
\ \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"\
acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8341968911917098,\n \"acc_stderr\": 0.026839845022314415,\n\
\ \"acc_norm\": 0.8341968911917098,\n \"acc_norm_stderr\": 0.026839845022314415\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.558974358974359,\n \"acc_stderr\": 0.025174048384000745,\n \
\ \"acc_norm\": 0.558974358974359,\n \"acc_norm_stderr\": 0.025174048384000745\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3074074074074074,\n \"acc_stderr\": 0.028133252578815632,\n \
\ \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.028133252578815632\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.5840336134453782,\n \"acc_stderr\": 0.03201650100739611,\n \
\ \"acc_norm\": 0.5840336134453782,\n \"acc_norm_stderr\": 0.03201650100739611\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3443708609271523,\n \"acc_stderr\": 0.03879687024073327,\n \"\
acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.03879687024073327\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7834862385321101,\n \"acc_stderr\": 0.017658710594443128,\n \"\
acc_norm\": 0.7834862385321101,\n \"acc_norm_stderr\": 0.017658710594443128\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.4583333333333333,\n \"acc_stderr\": 0.033981108902946366,\n \"\
acc_norm\": 0.4583333333333333,\n \"acc_norm_stderr\": 0.033981108902946366\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.803921568627451,\n \"acc_stderr\": 0.027865942286639318,\n \"\
acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639318\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415926,\n \
\ \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415926\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n\
\ \"acc_stderr\": 0.03210062154134986,\n \"acc_norm\": 0.6457399103139013,\n\
\ \"acc_norm_stderr\": 0.03210062154134986\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.6793893129770993,\n \"acc_stderr\": 0.04093329229834278,\n\
\ \"acc_norm\": 0.6793893129770993,\n \"acc_norm_stderr\": 0.04093329229834278\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"\
acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n\
\ \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.7407407407407407,\n\
\ \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.6380368098159509,\n \"acc_stderr\": 0.037757007291414416,\n\
\ \"acc_norm\": 0.6380368098159509,\n \"acc_norm_stderr\": 0.037757007291414416\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n\
\ \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n\
\ \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n\
\ \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8247863247863247,\n\
\ \"acc_stderr\": 0.02490443909891822,\n \"acc_norm\": 0.8247863247863247,\n\
\ \"acc_norm_stderr\": 0.02490443909891822\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \
\ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7803320561941252,\n\
\ \"acc_stderr\": 0.01480538447837116,\n \"acc_norm\": 0.7803320561941252,\n\
\ \"acc_norm_stderr\": 0.01480538447837116\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.5953757225433526,\n \"acc_stderr\": 0.02642481659400985,\n\
\ \"acc_norm\": 0.5953757225433526,\n \"acc_norm_stderr\": 0.02642481659400985\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39329608938547483,\n\
\ \"acc_stderr\": 0.01633726869427009,\n \"acc_norm\": 0.39329608938547483,\n\
\ \"acc_norm_stderr\": 0.01633726869427009\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.6503267973856209,\n \"acc_stderr\": 0.027305308076274695,\n\
\ \"acc_norm\": 0.6503267973856209,\n \"acc_norm_stderr\": 0.027305308076274695\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6237942122186495,\n\
\ \"acc_stderr\": 0.02751392568354943,\n \"acc_norm\": 0.6237942122186495,\n\
\ \"acc_norm_stderr\": 0.02751392568354943\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.6882716049382716,\n \"acc_stderr\": 0.025773111169630453,\n\
\ \"acc_norm\": 0.6882716049382716,\n \"acc_norm_stderr\": 0.025773111169630453\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \
\ \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4302477183833116,\n\
\ \"acc_stderr\": 0.012645361435115222,\n \"acc_norm\": 0.4302477183833116,\n\
\ \"acc_norm_stderr\": 0.012645361435115222\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.029768263528933105,\n\
\ \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.029768263528933105\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6013071895424836,\n \"acc_stderr\": 0.019808281317449848,\n \
\ \"acc_norm\": 0.6013071895424836,\n \"acc_norm_stderr\": 0.019808281317449848\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n\
\ \"acc_stderr\": 0.04653429807913508,\n \"acc_norm\": 0.6181818181818182,\n\
\ \"acc_norm_stderr\": 0.04653429807913508\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.5387755102040817,\n \"acc_stderr\": 0.031912820526692774,\n\
\ \"acc_norm\": 0.5387755102040817,\n \"acc_norm_stderr\": 0.031912820526692774\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.746268656716418,\n\
\ \"acc_stderr\": 0.030769444967296024,\n \"acc_norm\": 0.746268656716418,\n\
\ \"acc_norm_stderr\": 0.030769444967296024\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \
\ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n\
\ \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n\
\ \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7485380116959064,\n \"acc_stderr\": 0.033275044238468436,\n\
\ \"acc_norm\": 0.7485380116959064,\n \"acc_norm_stderr\": 0.033275044238468436\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5079559363525091,\n\
\ \"mc1_stderr\": 0.017501285074551825,\n \"mc2\": 0.6624959612962921,\n\
\ \"mc2_stderr\": 0.01569484808694598\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7600631412786109,\n \"acc_stderr\": 0.012002078629485742\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.26383623957543595,\n \
\ \"acc_stderr\": 0.012139386425126806\n }\n}\n```"
repo_url: https://huggingface.co/vicgalle/solarized-13B-dpo
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|arc:challenge|25_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|gsm8k|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hellaswag|10_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-01-21T04-38-15.337905.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-01-21T04-38-15.337905.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- '**/details_harness|winogrande|5_2024-01-21T04-38-15.337905.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-01-21T04-38-15.337905.parquet'
- config_name: results
data_files:
- split: 2024_01_21T04_38_15.337905
path:
- results_2024-01-21T04-38-15.337905.parquet
- split: latest
path:
- results_2024-01-21T04-38-15.337905.parquet
---
# Dataset Card for Evaluation run of vicgalle/solarized-13B-dpo
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [vicgalle/solarized-13B-dpo](https://huggingface.co/vicgalle/solarized-13B-dpo) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can, for instance, do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_vicgalle__solarized-13B-dpo",
"harness_winogrande_5",
split="train")
```
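To get the aggregated metrics rather than the per-example details, the "results" configuration listed in this card's `configs` section can be loaded the same way. A minimal sketch, assuming the `results` config and `latest` split names shown in the configs above:
```python
from datasets import load_dataset

# "results" and "latest" are the config and split names listed in this
# card's configs section; the returned row holds the run's aggregated metrics.
results = load_dataset(
    "open-llm-leaderboard/details_vicgalle__solarized-13B-dpo",
    "results",
    split="latest",
)
print(results[0])
```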
## Latest results
These are the [latest results from run 2024-01-21T04:38:15.337905](https://huggingface.co/datasets/open-llm-leaderboard/details_vicgalle__solarized-13B-dpo/blob/main/results_2024-01-21T04-38-15.337905.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.5894114296811551,
"acc_stderr": 0.033439242720178564,
"acc_norm": 0.595668575251732,
"acc_norm_stderr": 0.034144224072911684,
"mc1": 0.5079559363525091,
"mc1_stderr": 0.017501285074551825,
"mc2": 0.6624959612962921,
"mc2_stderr": 0.01569484808694598
},
"harness|arc:challenge|25": {
"acc": 0.6023890784982935,
"acc_stderr": 0.014301752223279536,
"acc_norm": 0.6271331058020477,
"acc_norm_stderr": 0.014131176760131163
},
"harness|hellaswag|10": {
"acc": 0.6286596295558654,
"acc_stderr": 0.004821757734156713,
"acc_norm": 0.8181637124078869,
"acc_norm_stderr": 0.0038492126228151687
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5481481481481482,
"acc_stderr": 0.042992689054808644,
"acc_norm": 0.5481481481481482,
"acc_norm_stderr": 0.042992689054808644
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6907894736842105,
"acc_stderr": 0.037610708698674805,
"acc_norm": 0.6907894736842105,
"acc_norm_stderr": 0.037610708698674805
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.67,
"acc_stderr": 0.047258156262526094,
"acc_norm": 0.67,
"acc_norm_stderr": 0.047258156262526094
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6339622641509434,
"acc_stderr": 0.02964781353936525,
"acc_norm": 0.6339622641509434,
"acc_norm_stderr": 0.02964781353936525
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.6180555555555556,
"acc_stderr": 0.040629907841466674,
"acc_norm": 0.6180555555555556,
"acc_norm_stderr": 0.040629907841466674
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.47,
"acc_stderr": 0.050161355804659205,
"acc_norm": 0.47,
"acc_norm_stderr": 0.050161355804659205
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.04824181513244218,
"acc_norm": 0.36,
"acc_norm_stderr": 0.04824181513244218
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.5895953757225434,
"acc_stderr": 0.03750757044895537,
"acc_norm": 0.5895953757225434,
"acc_norm_stderr": 0.03750757044895537
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3137254901960784,
"acc_stderr": 0.04617034827006717,
"acc_norm": 0.3137254901960784,
"acc_norm_stderr": 0.04617034827006717
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.73,
"acc_stderr": 0.04461960433384739,
"acc_norm": 0.73,
"acc_norm_stderr": 0.04461960433384739
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5234042553191489,
"acc_stderr": 0.03265019475033582,
"acc_norm": 0.5234042553191489,
"acc_norm_stderr": 0.03265019475033582
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.43859649122807015,
"acc_stderr": 0.04668000738510455,
"acc_norm": 0.43859649122807015,
"acc_norm_stderr": 0.04668000738510455
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5172413793103449,
"acc_stderr": 0.04164188720169375,
"acc_norm": 0.5172413793103449,
"acc_norm_stderr": 0.04164188720169375
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3994708994708995,
"acc_stderr": 0.025225450284067877,
"acc_norm": 0.3994708994708995,
"acc_norm_stderr": 0.025225450284067877
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.043758884927270605,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.043758884927270605
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.33,
"acc_stderr": 0.047258156262526045,
"acc_norm": 0.33,
"acc_norm_stderr": 0.047258156262526045
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.6612903225806451,
"acc_stderr": 0.02692344605930284,
"acc_norm": 0.6612903225806451,
"acc_norm_stderr": 0.02692344605930284
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4433497536945813,
"acc_stderr": 0.03495334582162934,
"acc_norm": 0.4433497536945813,
"acc_norm_stderr": 0.03495334582162934
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.62,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.62,
"acc_norm_stderr": 0.04878317312145632
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7515151515151515,
"acc_stderr": 0.033744026441394036,
"acc_norm": 0.7515151515151515,
"acc_norm_stderr": 0.033744026441394036
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7272727272727273,
"acc_stderr": 0.03173071239071724,
"acc_norm": 0.7272727272727273,
"acc_norm_stderr": 0.03173071239071724
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8341968911917098,
"acc_stderr": 0.026839845022314415,
"acc_norm": 0.8341968911917098,
"acc_norm_stderr": 0.026839845022314415
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.558974358974359,
"acc_stderr": 0.025174048384000745,
"acc_norm": 0.558974358974359,
"acc_norm_stderr": 0.025174048384000745
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.028133252578815632,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.028133252578815632
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.5840336134453782,
"acc_stderr": 0.03201650100739611,
"acc_norm": 0.5840336134453782,
"acc_norm_stderr": 0.03201650100739611
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3443708609271523,
"acc_stderr": 0.03879687024073327,
"acc_norm": 0.3443708609271523,
"acc_norm_stderr": 0.03879687024073327
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7834862385321101,
"acc_stderr": 0.017658710594443128,
"acc_norm": 0.7834862385321101,
"acc_norm_stderr": 0.017658710594443128
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.4583333333333333,
"acc_stderr": 0.033981108902946366,
"acc_norm": 0.4583333333333333,
"acc_norm_stderr": 0.033981108902946366
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.803921568627451,
"acc_stderr": 0.027865942286639318,
"acc_norm": 0.803921568627451,
"acc_norm_stderr": 0.027865942286639318
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7637130801687764,
"acc_stderr": 0.02765215314415926,
"acc_norm": 0.7637130801687764,
"acc_norm_stderr": 0.02765215314415926
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6457399103139013,
"acc_stderr": 0.03210062154134986,
"acc_norm": 0.6457399103139013,
"acc_norm_stderr": 0.03210062154134986
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.6793893129770993,
"acc_stderr": 0.04093329229834278,
"acc_norm": 0.6793893129770993,
"acc_norm_stderr": 0.04093329229834278
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8016528925619835,
"acc_stderr": 0.036401182719909456,
"acc_norm": 0.8016528925619835,
"acc_norm_stderr": 0.036401182719909456
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7407407407407407,
"acc_stderr": 0.04236511258094632,
"acc_norm": 0.7407407407407407,
"acc_norm_stderr": 0.04236511258094632
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.6380368098159509,
"acc_stderr": 0.037757007291414416,
"acc_norm": 0.6380368098159509,
"acc_norm_stderr": 0.037757007291414416
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489122,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489122
},
"harness|hendrycksTest-management|5": {
"acc": 0.7961165048543689,
"acc_stderr": 0.039891398595317706,
"acc_norm": 0.7961165048543689,
"acc_norm_stderr": 0.039891398595317706
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8247863247863247,
"acc_stderr": 0.02490443909891822,
"acc_norm": 0.8247863247863247,
"acc_norm_stderr": 0.02490443909891822
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.7803320561941252,
"acc_stderr": 0.01480538447837116,
"acc_norm": 0.7803320561941252,
"acc_norm_stderr": 0.01480538447837116
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.5953757225433526,
"acc_stderr": 0.02642481659400985,
"acc_norm": 0.5953757225433526,
"acc_norm_stderr": 0.02642481659400985
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.39329608938547483,
"acc_stderr": 0.01633726869427009,
"acc_norm": 0.39329608938547483,
"acc_norm_stderr": 0.01633726869427009
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.6503267973856209,
"acc_stderr": 0.027305308076274695,
"acc_norm": 0.6503267973856209,
"acc_norm_stderr": 0.027305308076274695
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6237942122186495,
"acc_stderr": 0.02751392568354943,
"acc_norm": 0.6237942122186495,
"acc_norm_stderr": 0.02751392568354943
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.6882716049382716,
"acc_stderr": 0.025773111169630453,
"acc_norm": 0.6882716049382716,
"acc_norm_stderr": 0.025773111169630453
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.43617021276595747,
"acc_stderr": 0.02958345203628407,
"acc_norm": 0.43617021276595747,
"acc_norm_stderr": 0.02958345203628407
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.4302477183833116,
"acc_stderr": 0.012645361435115222,
"acc_norm": 0.4302477183833116,
"acc_norm_stderr": 0.012645361435115222
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.5992647058823529,
"acc_stderr": 0.029768263528933105,
"acc_norm": 0.5992647058823529,
"acc_norm_stderr": 0.029768263528933105
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6013071895424836,
"acc_stderr": 0.019808281317449848,
"acc_norm": 0.6013071895424836,
"acc_norm_stderr": 0.019808281317449848
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6181818181818182,
"acc_stderr": 0.04653429807913508,
"acc_norm": 0.6181818181818182,
"acc_norm_stderr": 0.04653429807913508
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.5387755102040817,
"acc_stderr": 0.031912820526692774,
"acc_norm": 0.5387755102040817,
"acc_norm_stderr": 0.031912820526692774
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.746268656716418,
"acc_stderr": 0.030769444967296024,
"acc_norm": 0.746268656716418,
"acc_norm_stderr": 0.030769444967296024
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.75,
"acc_stderr": 0.04351941398892446,
"acc_norm": 0.75,
"acc_norm_stderr": 0.04351941398892446
},
"harness|hendrycksTest-virology|5": {
"acc": 0.4819277108433735,
"acc_stderr": 0.038899512528272166,
"acc_norm": 0.4819277108433735,
"acc_norm_stderr": 0.038899512528272166
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7485380116959064,
"acc_stderr": 0.033275044238468436,
"acc_norm": 0.7485380116959064,
"acc_norm_stderr": 0.033275044238468436
},
"harness|truthfulqa:mc|0": {
"mc1": 0.5079559363525091,
"mc1_stderr": 0.017501285074551825,
"mc2": 0.6624959612962921,
"mc2_stderr": 0.01569484808694598
},
"harness|winogrande|5": {
"acc": 0.7600631412786109,
"acc_stderr": 0.012002078629485742
},
"harness|gsm8k|5": {
"acc": 0.26383623957543595,
"acc_stderr": 0.012139386425126806
}
}
```
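The per-task entries above all follow the key pattern `harness|<task>|<n_shots>` with `acc`/`acc_norm` (or `mc1`/`mc2`) fields, which makes them easy to flatten for side-by-side comparison. A minimal sketch, using a hand-copied subset of the dict printed above (`all_results` is a stand-in name introduced here, not part of the dataset):
```python
# Flatten per-task accuracies from the results dict shown above.
# `all_results` is a hypothetical variable holding (a subset of) that dict.
all_results = {
    "all": {"acc": 0.5894114296811551},
    "harness|arc:challenge|25": {"acc": 0.6023890784982935},
    "harness|hellaswag|10": {"acc": 0.6286596295558654},
    "harness|gsm8k|5": {"acc": 0.26383623957543595},
}

per_task_acc = {
    key.split("|")[1]: metrics["acc"]
    for key, metrics in all_results.items()
    if key.startswith("harness|") and "acc" in metrics
}
print(per_task_acc)
# {'arc:challenge': 0.602..., 'hellaswag': 0.628..., 'gsm8k': 0.263...}
```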
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
wid4soe/182-final-proj | ---
dataset_info:
features:
- name: original_image
dtype: string
- name: edit_prompt
dtype: string
- name: new_image
dtype: string
splits:
- name: test
num_bytes: 18956
num_examples: 161
- name: validation
num_bytes: 6584
num_examples: 55
- name: train
num_bytes: 68238
num_examples: 570
download_size: 34004
dataset_size: 93778
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
mugithi/ubuntu_question_answer | ---
dataset_info:
features:
- name: question
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 2051467
num_examples: 12024
- name: test
num_bytes: 883252
num_examples: 5154
download_size: 1928151
dataset_size: 2934719
---
# Dataset Card for "ubuntu_question_answer"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
result-kand2-sdxl-wuerst-karlo/dc1d52d8 | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 168
num_examples: 10
download_size: 1321
dataset_size: 168
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "dc1d52d8"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
rosimeirecosta/c_corpus_br_finetuning_language_model_deberta | ---
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 36065567
num_examples: 228736
- name: validation
num_bytes: 9012563
num_examples: 57184
download_size: 0
dataset_size: 45078130
---
# Dataset Card for "c_corpus_br_finetuning_language_model_deberta"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties | ---
pretty_name: Evaluation run of Gille/StrangeMerges_18-7B-dare_ties
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [Gille/StrangeMerges_18-7B-dare_ties](https://huggingface.co/Gille/StrangeMerges_18-7B-dare_ties)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configurations, each one corresponding to one of the\
  \ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
  \ timestamp of the run. The \"train\" split always points to the latest results.\n\
  \nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-02T02:26:03.631353](https://huggingface.co/datasets/open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties/blob/main/results_2024-02-02T02-26-03.631353.json) (note\
  \ that there might be results for other tasks in the repos if successive evals didn't\
  \ cover the same tasks. You can find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.638141011794225,\n\
\ \"acc_stderr\": 0.03221763395931037,\n \"acc_norm\": 0.640122595431434,\n\
\ \"acc_norm_stderr\": 0.032859222090384846,\n \"mc1\": 0.3574051407588739,\n\
\ \"mc1_stderr\": 0.0167765996767294,\n \"mc2\": 0.521661560648742,\n\
\ \"mc2_stderr\": 0.015256495321750132\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6109215017064846,\n \"acc_stderr\": 0.014247309976045607,\n\
\ \"acc_norm\": 0.6407849829351536,\n \"acc_norm_stderr\": 0.014020224155839162\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6521609241187014,\n\
\ \"acc_stderr\": 0.004753112432728698,\n \"acc_norm\": 0.8436566421031667,\n\
\ \"acc_norm_stderr\": 0.003624383120823463\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n\
\ \"acc_stderr\": 0.04244633238353227,\n \"acc_norm\": 0.5925925925925926,\n\
\ \"acc_norm_stderr\": 0.04244633238353227\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n\
\ \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\
\ \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \
\ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n\
\ \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.034765901043041336,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.034765901043041336\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \
\ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n\
\ \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n\
\ \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n\
\ \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n\
\ \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n\
\ \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\
\ \"acc_stderr\": 0.046854730419077895,\n \"acc_norm\": 0.45614035087719296,\n\
\ \"acc_norm_stderr\": 0.046854730419077895\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n\
\ \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.41534391534391535,\n \"acc_stderr\": 0.0253795249107784,\n \"\
acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.0253795249107784\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n\
\ \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n\
\ \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \
\ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\
\ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7741935483870968,\n\
\ \"acc_stderr\": 0.023785577884181012,\n \"acc_norm\": 0.7741935483870968,\n\
\ \"acc_norm_stderr\": 0.023785577884181012\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n\
\ \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\"\
: 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n\
\ \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026705,\n \"\
acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026705\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919446,\n\
\ \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919446\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6153846153846154,\n \"acc_stderr\": 0.024666744915187208,\n\
\ \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.024666744915187208\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3074074074074074,\n \"acc_stderr\": 0.02813325257881563,\n \
\ \"acc_norm\": 0.3074074074074074,\n \"acc_norm_stderr\": 0.02813325257881563\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n\
\ \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\
acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8348623853211009,\n \"acc_stderr\": 0.015919557829976044,\n \"\
acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.015919557829976044\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"\
acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"\
acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290916,\n \
\ \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290916\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n\
\ \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n\
\ \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596913,\n\
\ \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596913\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"\
acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n\
\ \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n\
\ \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.8220858895705522,\n \"acc_stderr\": 0.03004735765580663,\n\
\ \"acc_norm\": 0.8220858895705522,\n \"acc_norm_stderr\": 0.03004735765580663\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n\
\ \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n\
\ \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n\
\ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n\
\ \"acc_stderr\": 0.02280138253459754,\n \"acc_norm\": 0.8589743589743589,\n\
\ \"acc_norm_stderr\": 0.02280138253459754\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \
\ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8173690932311622,\n\
\ \"acc_stderr\": 0.013816335389973136,\n \"acc_norm\": 0.8173690932311622,\n\
\ \"acc_norm_stderr\": 0.013816335389973136\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n\
\ \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.30614525139664805,\n\
\ \"acc_stderr\": 0.015414494487903217,\n \"acc_norm\": 0.30614525139664805,\n\
\ \"acc_norm_stderr\": 0.015414494487903217\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292452,\n\
\ \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292452\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n\
\ \"acc_stderr\": 0.02592237178881877,\n \"acc_norm\": 0.7041800643086816,\n\
\ \"acc_norm_stderr\": 0.02592237178881877\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.02474862449053737,\n\
\ \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.02474862449053737\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\"\
: 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\"\
: {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083138,\n\
\ \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083138\n\
\ },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\"\
: 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"\
acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687492,\n \
\ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687492\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\
\ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\
\ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274645,\n\
\ \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274645\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n\
\ \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n\
\ \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \
\ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n\
\ \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n\
\ \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n\
\ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3574051407588739,\n\
\ \"mc1_stderr\": 0.0167765996767294,\n \"mc2\": 0.521661560648742,\n\
\ \"mc2_stderr\": 0.015256495321750132\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7726913970007893,\n \"acc_stderr\": 0.011778612167091087\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6080363912054587,\n \
\ \"acc_stderr\": 0.013447140886023817\n }\n}\n```"
repo_url: https://huggingface.co/Gille/StrangeMerges_18-7B-dare_ties
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|arc:challenge|25_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|gsm8k|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hellaswag|10_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-26-03.631353.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-02T02-26-03.631353.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- '**/details_harness|winogrande|5_2024-02-02T02-26-03.631353.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-02T02-26-03.631353.parquet'
- config_name: results
data_files:
- split: 2024_02_02T02_26_03.631353
path:
- results_2024-02-02T02-26-03.631353.parquet
- split: latest
path:
- results_2024-02-02T02-26-03.631353.parquet
---
# Dataset Card for Evaluation run of Gille/StrangeMerges_18-7B-dare_ties
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [Gille/StrangeMerges_18-7B-dare_ties](https://huggingface.co/Gille/StrangeMerges_18-7B-dare_ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
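For example, you can list the available configurations before picking one. This is a minimal sketch using the `datasets` library; the repository name is taken from this card:
```python
from datasets import get_dataset_config_names

# List all configurations of this details repository:
# one per evaluated task, plus the aggregated "results" configuration.
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties"
)
print(len(configs))
print(configs[:5])
```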
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties",
"harness_winogrande_5",
split="train")
```
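Similarly, the aggregated metrics can be loaded from the "results" configuration. The sketch below assumes the "latest" split listed in this card's metadata, which always points to the most recent run:
```python
from datasets import load_dataset

# Aggregated metrics for the whole run (the numbers shown on the leaderboard).
results = load_dataset(
    "open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties",
    "results",
    split="latest",
)
print(results[0])
```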
## Latest results
These are the [latest results from run 2024-02-02T02:26:03.631353](https://huggingface.co/datasets/open-llm-leaderboard/details_Gille__StrangeMerges_18-7B-dare_ties/blob/main/results_2024-02-02T02-26-03.631353.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.638141011794225,
"acc_stderr": 0.03221763395931037,
"acc_norm": 0.640122595431434,
"acc_norm_stderr": 0.032859222090384846,
"mc1": 0.3574051407588739,
"mc1_stderr": 0.0167765996767294,
"mc2": 0.521661560648742,
"mc2_stderr": 0.015256495321750132
},
"harness|arc:challenge|25": {
"acc": 0.6109215017064846,
"acc_stderr": 0.014247309976045607,
"acc_norm": 0.6407849829351536,
"acc_norm_stderr": 0.014020224155839162
},
"harness|hellaswag|10": {
"acc": 0.6521609241187014,
"acc_stderr": 0.004753112432728698,
"acc_norm": 0.8436566421031667,
"acc_norm_stderr": 0.003624383120823463
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542128,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542128
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5925925925925926,
"acc_stderr": 0.04244633238353227,
"acc_norm": 0.5925925925925926,
"acc_norm_stderr": 0.04244633238353227
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7039473684210527,
"acc_stderr": 0.03715062154998904,
"acc_norm": 0.7039473684210527,
"acc_norm_stderr": 0.03715062154998904
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6792452830188679,
"acc_stderr": 0.028727502957880267,
"acc_norm": 0.6792452830188679,
"acc_norm_stderr": 0.028727502957880267
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.034765901043041336,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.034765901043041336
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.46,
"acc_stderr": 0.05009082659620333,
"acc_norm": 0.46,
"acc_norm_stderr": 0.05009082659620333
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.35,
"acc_stderr": 0.04793724854411019,
"acc_norm": 0.35,
"acc_norm_stderr": 0.04793724854411019
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6358381502890174,
"acc_stderr": 0.03669072477416907,
"acc_norm": 0.6358381502890174,
"acc_norm_stderr": 0.03669072477416907
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.38235294117647056,
"acc_stderr": 0.04835503696107223,
"acc_norm": 0.38235294117647056,
"acc_norm_stderr": 0.04835503696107223
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.042923469599092816,
"acc_norm": 0.76,
"acc_norm_stderr": 0.042923469599092816
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5702127659574469,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.5702127659574469,
"acc_norm_stderr": 0.03236214467715564
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.45614035087719296,
"acc_stderr": 0.046854730419077895,
"acc_norm": 0.45614035087719296,
"acc_norm_stderr": 0.046854730419077895
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5172413793103449,
"acc_stderr": 0.04164188720169375,
"acc_norm": 0.5172413793103449,
"acc_norm_stderr": 0.04164188720169375
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.41534391534391535,
"acc_stderr": 0.0253795249107784,
"acc_norm": 0.41534391534391535,
"acc_norm_stderr": 0.0253795249107784
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4523809523809524,
"acc_stderr": 0.044518079590553275,
"acc_norm": 0.4523809523809524,
"acc_norm_stderr": 0.044518079590553275
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.37,
"acc_stderr": 0.048523658709391,
"acc_norm": 0.37,
"acc_norm_stderr": 0.048523658709391
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7741935483870968,
"acc_stderr": 0.023785577884181012,
"acc_norm": 0.7741935483870968,
"acc_norm_stderr": 0.023785577884181012
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.66,
"acc_stderr": 0.04760952285695237,
"acc_norm": 0.66,
"acc_norm_stderr": 0.04760952285695237
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7757575757575758,
"acc_stderr": 0.032568666616811015,
"acc_norm": 0.7757575757575758,
"acc_norm_stderr": 0.032568666616811015
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7929292929292929,
"acc_stderr": 0.02886977846026705,
"acc_norm": 0.7929292929292929,
"acc_norm_stderr": 0.02886977846026705
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8860103626943006,
"acc_stderr": 0.022935144053919446,
"acc_norm": 0.8860103626943006,
"acc_norm_stderr": 0.022935144053919446
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6153846153846154,
"acc_stderr": 0.024666744915187208,
"acc_norm": 0.6153846153846154,
"acc_norm_stderr": 0.024666744915187208
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3074074074074074,
"acc_stderr": 0.02813325257881563,
"acc_norm": 0.3074074074074074,
"acc_norm_stderr": 0.02813325257881563
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6848739495798319,
"acc_stderr": 0.030176808288974337,
"acc_norm": 0.6848739495798319,
"acc_norm_stderr": 0.030176808288974337
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.33774834437086093,
"acc_stderr": 0.03861557546255169,
"acc_norm": 0.33774834437086093,
"acc_norm_stderr": 0.03861557546255169
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8348623853211009,
"acc_stderr": 0.015919557829976044,
"acc_norm": 0.8348623853211009,
"acc_norm_stderr": 0.015919557829976044
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5324074074074074,
"acc_stderr": 0.03402801581358966,
"acc_norm": 0.5324074074074074,
"acc_norm_stderr": 0.03402801581358966
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8235294117647058,
"acc_stderr": 0.026756401538078962,
"acc_norm": 0.8235294117647058,
"acc_norm_stderr": 0.026756401538078962
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8059071729957806,
"acc_stderr": 0.025744902532290916,
"acc_norm": 0.8059071729957806,
"acc_norm_stderr": 0.025744902532290916
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6816143497757847,
"acc_stderr": 0.03126580522513713,
"acc_norm": 0.6816143497757847,
"acc_norm_stderr": 0.03126580522513713
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7633587786259542,
"acc_stderr": 0.03727673575596913,
"acc_norm": 0.7633587786259542,
"acc_norm_stderr": 0.03727673575596913
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8099173553719008,
"acc_stderr": 0.03581796951709282,
"acc_norm": 0.8099173553719008,
"acc_norm_stderr": 0.03581796951709282
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7777777777777778,
"acc_stderr": 0.0401910747255735,
"acc_norm": 0.7777777777777778,
"acc_norm_stderr": 0.0401910747255735
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.8220858895705522,
"acc_stderr": 0.03004735765580663,
"acc_norm": 0.8220858895705522,
"acc_norm_stderr": 0.03004735765580663
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.5089285714285714,
"acc_stderr": 0.04745033255489123,
"acc_norm": 0.5089285714285714,
"acc_norm_stderr": 0.04745033255489123
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.040580420156460344
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8589743589743589,
"acc_stderr": 0.02280138253459754,
"acc_norm": 0.8589743589743589,
"acc_norm_stderr": 0.02280138253459754
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.71,
"acc_stderr": 0.04560480215720684,
"acc_norm": 0.71,
"acc_norm_stderr": 0.04560480215720684
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8173690932311622,
"acc_stderr": 0.013816335389973136,
"acc_norm": 0.8173690932311622,
"acc_norm_stderr": 0.013816335389973136
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7225433526011561,
"acc_stderr": 0.024105712607754307,
"acc_norm": 0.7225433526011561,
"acc_norm_stderr": 0.024105712607754307
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.30614525139664805,
"acc_stderr": 0.015414494487903217,
"acc_norm": 0.30614525139664805,
"acc_norm_stderr": 0.015414494487903217
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.738562091503268,
"acc_stderr": 0.025160998214292452,
"acc_norm": 0.738562091503268,
"acc_norm_stderr": 0.025160998214292452
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7041800643086816,
"acc_stderr": 0.02592237178881877,
"acc_norm": 0.7041800643086816,
"acc_norm_stderr": 0.02592237178881877
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7283950617283951,
"acc_stderr": 0.02474862449053737,
"acc_norm": 0.7283950617283951,
"acc_norm_stderr": 0.02474862449053737
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5,
"acc_stderr": 0.029827499313594685,
"acc_norm": 0.5,
"acc_norm_stderr": 0.029827499313594685
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46870925684485004,
"acc_stderr": 0.012745204626083138,
"acc_norm": 0.46870925684485004,
"acc_norm_stderr": 0.012745204626083138
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6801470588235294,
"acc_stderr": 0.02833295951403121,
"acc_norm": 0.6801470588235294,
"acc_norm_stderr": 0.02833295951403121
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6666666666666666,
"acc_stderr": 0.019070985589687492,
"acc_norm": 0.6666666666666666,
"acc_norm_stderr": 0.019070985589687492
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6363636363636364,
"acc_stderr": 0.04607582090719976,
"acc_norm": 0.6363636363636364,
"acc_norm_stderr": 0.04607582090719976
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274645,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274645
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8308457711442786,
"acc_stderr": 0.02650859065623327,
"acc_norm": 0.8308457711442786,
"acc_norm_stderr": 0.02650859065623327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.87,
"acc_stderr": 0.033799766898963086,
"acc_norm": 0.87,
"acc_norm_stderr": 0.033799766898963086
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5602409638554217,
"acc_stderr": 0.03864139923699122,
"acc_norm": 0.5602409638554217,
"acc_norm_stderr": 0.03864139923699122
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8245614035087719,
"acc_stderr": 0.029170885500727665,
"acc_norm": 0.8245614035087719,
"acc_norm_stderr": 0.029170885500727665
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3574051407588739,
"mc1_stderr": 0.0167765996767294,
"mc2": 0.521661560648742,
"mc2_stderr": 0.015256495321750132
},
"harness|winogrande|5": {
"acc": 0.7726913970007893,
"acc_stderr": 0.011778612167091087
},
"harness|gsm8k|5": {
"acc": 0.6080363912054587,
"acc_stderr": 0.013447140886023817
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
Jiahuan/vox_arta_lego | ---
dataset_info:
features:
- name: input
dtype: string
- name: output
dtype: string
splits:
- name: train
num_bytes: 951157
num_examples: 622
- name: validation
num_bytes: 97449
num_examples: 70
- name: test
num_bytes: 246386
num_examples: 174
download_size: 424416
dataset_size: 1294992
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
---
|
distilled-from-one-sec-cv12/chunk_123 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 1579501300
num_examples: 307775
download_size: 1611421361
dataset_size: 1579501300
---
# Dataset Card for "chunk_123"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
mask-distilled-onesec-cv12-each-chunk-uniq/chunk_214 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 1114694812.0
num_examples: 218911
download_size: 1138044562
dataset_size: 1114694812.0
---
# Dataset Card for "chunk_214"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
ALPATNKM/APN | ---
license: odc-by
---
|
akadhim-ai/ios_icons_2 | ---
dataset_info:
features:
- name: image
dtype: image
- name: text
dtype: string
splits:
- name: train
num_bytes: 768688.0
num_examples: 10
download_size: 769873
dataset_size: 768688.0
---
# Dataset Card for "ios_icons_2"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
heliosprime/twitter_dataset_1713041875 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 14114
num_examples: 32
download_size: 9964
dataset_size: 14114
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "twitter_dataset_1713041875"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
tyzhu/random_letter_same_length_find_passage_train10_eval20_rare | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: inputs
dtype: string
- name: targets
dtype: string
splits:
- name: train
num_bytes: 14103
num_examples: 40
- name: validation
num_bytes: 7301
num_examples: 20
download_size: 19959
dataset_size: 21404
---
# Dataset Card for "random_letter_same_length_find_passage_train10_eval20_rare"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
yashnbx/gita_supersite_sanskrit_tts | ---
dataset_info:
features:
- name: shloka_id
dtype: string
- name: text
dtype: string
- name: audio
dtype: audio
splits:
- name: train
num_bytes: 25244323.0
num_examples: 701
download_size: 24905370
dataset_size: 25244323.0
---
# Dataset Card for "gita_supersite_sanskrit_tts"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
cyrilzhang/TinyStories2-ascii | ---
license: cdla-sharing-1.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
dataset_info:
features:
- name: text
dtype: string
splits:
- name: train
num_bytes: 2188147629.6032934
num_examples: 2708167
- name: validation
num_bytes: 22108524.398827404
num_examples: 27544
download_size: 1143073704
dataset_size: 2210256154.002121
---
# Dataset Card for "TinyStories2-ascii"
- `TinyStoriesV2-GPT4-{train,validation}.txt` from [roneneldan/TinyStories](https://huggingface.co/datasets/roneneldan/TinyStories)
- ad-hoc Unicode -> ASCII normalization
- remove empty/incomplete stories |
lyon-nlp/alloprof | ---
task_categories:
- text-classification
- question-answering
language:
- fr
pretty_name: alloprof
size_categories:
- 1K<n<10K
configs:
- config_name: documents
data_files: documents.json
- config_name: queries
data_files: queries.json
---
This is a re-edit of the Alloprof dataset (which can be found here: https://huggingface.co/datasets/antoinelb7/alloprof).
For more information about the data source and the features, please refer to the original dataset card made by the authors, along with their paper available here: https://arxiv.org/abs/2302.07738
This re-edition of the dataset is a preprocessed version of the original, **in a more ready-to-use format**. Essentially, the texts have been cleaned, and data not usable for retrieval has been discarded.
### Why a re-edition?
It was made for easier use in the MTEB benchmarking pipeline, in order to contribute to the MTEB leaderboard: https://huggingface.co/spaces/mteb/leaderboard.
For more information about the project, please refer to the associated paper: https://arxiv.org/pdf/2210.07316.pdf
### Usage
To use the dataset, you need to specify the subset you want (documents or queries) when calling the load_dataset() method.
For example, to get the queries, use:
```py
from datasets import load_dataset
dataset = load_dataset("lyon-nlp/alloprof", "queries")
```
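Similarly, a minimal sketch for loading the documents subset (config name taken from the YAML metadata above):
```py
from datasets import load_dataset

# Load the preprocessed Alloprof documents (the retrieval corpus)
documents = load_dataset("lyon-nlp/alloprof", "documents")
```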
|
result-kand2-sdxl-wuerst-karlo/694df328 | ---
dataset_info:
features:
- name: result
dtype: string
- name: id
dtype: int64
splits:
- name: train
num_bytes: 162
num_examples: 10
download_size: 1318
dataset_size: 162
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "694df328"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
awghuku/infore25 | ---
license: cc-by-4.0
---
|
SJTU-TES/TSP | ---
license: apache-2.0
---
|
tuanacanal/conversation-ds-mini | ---
dataset_info:
features:
- name: 'Unnamed: 0'
dtype: int64
- name: question
dtype: string
- name: answer
dtype: string
splits:
- name: train
num_bytes: 266461.0534228188
num_examples: 3352
- name: validation
num_bytes: 29650.946577181207
num_examples: 373
download_size: 204622
dataset_size: 296112.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
---
|
BlodyTraveler/4x-UltraSharp | ---
license: unknown
---
|
arieg/bw_spec_cls_4_18_noise_200 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': '1666'
'1': '1673'
'2': '1680'
'3': '1681'
splits:
- name: train
num_bytes: 47436633.0
num_examples: 800
- name: test
num_bytes: 1187175.0
num_examples: 20
download_size: 26409307
dataset_size: 48623808.0
---
# Dataset Card for "bw_spec_cls_4_18_noise_200"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
CyberHarem/quinella_swordartonline | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of quinella (Sword Art Online)
This is the dataset of quinella (Sword Art Online), containing 152 images and their tags.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan, ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([Hugging Face organization](https://huggingface.co/deepghs)).
|
Aaryan333/fer2013_train_publicTest_privateTest | ---
dataset_info:
features:
- name: label
dtype:
class_label:
names:
'0': Angry
'1': Disgust
'2': Fear
'3': Happy
'4': Sad
'5': Surprise
'6': Neutral
- name: image
dtype: image
splits:
- name: train
num_bytes: 106750555.375
num_examples: 28709
- name: publicTest
num_bytes: 13383908.375
num_examples: 3589
- name: privateTest
num_bytes: 13384809.375
num_examples: 3589
download_size: 133185182
dataset_size: 133519273.125
---
# Dataset Card for "fer2013_train_publicTest_privateTest"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
mask-distilled-one-sec-cv12/chunk_11 | ---
dataset_info:
features:
- name: logits
sequence: float32
- name: mfcc
sequence:
sequence: float64
splits:
- name: train
num_bytes: 1003694304
num_examples: 197112
download_size: 1020329415
dataset_size: 1003694304
---
# Dataset Card for "chunk_11"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
homangab/robotlarge | ---
dataset_info:
features:
- name: pixel_values
dtype: image
- name: label
dtype: image
splits:
- name: train
num_bytes: 70799794.0
num_examples: 168
download_size: 5701250
dataset_size: 70799794.0
---
# Dataset Card for "robotlarge"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Cohere/miracl-bn-queries-22-12 | ---
annotations_creators:
- expert-generated
language:
- bn
multilinguality:
- multilingual
size_categories: []
source_datasets: []
tags: []
task_categories:
- text-retrieval
license:
- apache-2.0
task_ids:
- document-retrieval
---
# MIRACL (bn) embedded with cohere.ai `multilingual-22-12` encoder
We encoded the [MIRACL dataset](https://huggingface.co/miracl) using the [cohere.ai](https://txt.cohere.ai/multilingual/) `multilingual-22-12` embedding model.
The query embeddings can be found in [Cohere/miracl-bn-queries-22-12](https://huggingface.co/datasets/Cohere/miracl-bn-queries-22-12) and the corpus embeddings can be found in [Cohere/miracl-bn-corpus-22-12](https://huggingface.co/datasets/Cohere/miracl-bn-corpus-22-12).
For the original datasets, see [miracl/miracl](https://huggingface.co/datasets/miracl/miracl) and [miracl/miracl-corpus](https://huggingface.co/datasets/miracl/miracl-corpus).
Dataset info:
> MIRACL (Multilingual Information Retrieval Across a Continuum of Languages) is a multilingual retrieval dataset that focuses on search across 18 different languages, which collectively encompass over three billion native speakers around the world.
>
> The corpus for each language is prepared from a Wikipedia dump, where we keep only the plain text and discard images, tables, etc. Each article is segmented into multiple passages using WikiExtractor based on natural discourse units (e.g., `\n\n` in the wiki markup). Each of these passages comprises a "document" or unit of retrieval. We preserve the Wikipedia article title of each passage.
## Embeddings
We compute the embeddings for `title+" "+text` using our `multilingual-22-12` embedding model, a state-of-the-art model that works for semantic search in 100 languages. If you want to learn more about this model, have a look at the [cohere.ai multilingual embedding model](https://txt.cohere.ai/multilingual/).
## Loading the dataset
In [miracl-bn-corpus-22-12](https://huggingface.co/datasets/Cohere/miracl-bn-corpus-22-12) we provide the corpus embeddings. Note, depending on the selected split, the respective files can be quite large.
You can either load the dataset like this:
```python
from datasets import load_dataset
docs = load_dataset(f"Cohere/miracl-bn-corpus-22-12", split="train")
```
Or you can stream it without downloading it first:
```python
from datasets import load_dataset
docs = load_dataset(f"Cohere/miracl-bn-corpus-22-12", split="train", streaming=True)
for doc in docs:
docid = doc['docid']
title = doc['title']
text = doc['text']
emb = doc['emb']
```
## Search
Have a look at [miracl-bn-queries-22-12](https://huggingface.co/datasets/Cohere/miracl-bn-queries-22-12) where we provide the query embeddings for the MIRACL dataset.
To search the documents, you must use the **dot product**: compare the query embeddings with the document embeddings, either with a vector database (recommended) or by computing the dot products directly.
A full search example:
```python
# Attention! For large datasets, this requires a lot of memory to store
# all document embeddings and to compute the dot product scores.
# Only use this for smaller datasets. For large datasets, use a vector DB
from datasets import load_dataset
import torch
#Load documents + embeddings
docs = load_dataset(f"Cohere/miracl-bn-corpus-22-12", split="train")
doc_embeddings = torch.tensor(docs['emb'])
# Load queries
queries = load_dataset(f"Cohere/miracl-bn-queries-22-12", split="dev")
# Select the first query as example
qid = 0
query = queries[qid]
query_embedding = torch.tensor([queries[qid]['emb']])  # only the selected query's embedding, shape (1, dim)
# Compute dot score between query embedding and document embeddings
dot_scores = torch.mm(query_embedding, doc_embeddings.transpose(0, 1))
top_k = torch.topk(dot_scores, k=3)
# Print results
print("Query:", query['query'])
for doc_id in top_k.indices[0].tolist():
print(docs[doc_id]['title'])
print(docs[doc_id]['text'])
```
You can get embeddings for new queries using our API:
```python
#Run: pip install cohere
import cohere
co = cohere.Client(f"{api_key}") # You should add your cohere API Key here :))
texts = ['my search query']
response = co.embed(texts=texts, model='multilingual-22-12')
query_embedding = response.embeddings[0] # Get the embedding for the first text
```
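To rank corpus passages for a freshly embedded query, you can reuse the dot-product scoring shown above. A minimal sketch, assuming `docs` and `doc_embeddings` were loaded as in the earlier example:
```python
import torch

# Score the new query embedding against all corpus embeddings (dot product)
query_tensor = torch.tensor(query_embedding).unsqueeze(0)            # shape: (1, dim)
dot_scores = torch.mm(query_tensor, doc_embeddings.transpose(0, 1))  # shape: (1, num_docs)
top_k = torch.topk(dot_scores, k=3)

for doc_id in top_k.indices[0].tolist():
    print(docs[doc_id]['title'])
```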
## Performance
In the following table we compare the cohere multilingual-22-12 model with Elasticsearch version 8.6.0 lexical search (title and passage indexed as independent fields). Note that Elasticsearch doesn't support all languages that are part of the MIRACL dataset.
We compute nDCG@10 (a ranking-based metric), as well as hit@3: whether at least one relevant document appears in the top-3 results. We find that hit@3 is easier to interpret, as it represents the share of queries for which a relevant document is found among the top-3 results.
Note: MIRACL only annotated a small fraction of passages (10 per query) for relevancy. Especially for larger Wikipedias (like English), we often found many more relevant passages. This is known as annotation holes. The real nDCG@10 and hit@3 performance is likely higher than reported.
| Model | cohere multilingual-22-12 nDCG@10 | cohere multilingual-22-12 hit@3 | ES 8.6.0 nDCG@10 | ES 8.6.0 hit@3 |
|---|---|---|---|---|
| miracl-ar | 64.2 | 75.2 | 46.8 | 56.2 |
| miracl-bn | 61.5 | 75.7 | 49.2 | 60.1 |
| miracl-de | 44.4 | 60.7 | 19.6 | 29.8 |
| miracl-en | 44.6 | 62.2 | 30.2 | 43.2 |
| miracl-es | 47.0 | 74.1 | 27.0 | 47.2 |
| miracl-fi | 63.7 | 76.2 | 51.4 | 61.6 |
| miracl-fr | 46.8 | 57.1 | 17.0 | 21.6 |
| miracl-hi | 50.7 | 62.9 | 41.0 | 48.9 |
| miracl-id | 44.8 | 63.8 | 39.2 | 54.7 |
| miracl-ru | 49.2 | 66.9 | 25.4 | 36.7 |
| **Avg** | 51.7 | 67.5 | 34.7 | 46.0 |
Further languages (not supported by Elasticsearch):
| Model | cohere multilingual-22-12 nDCG@10 | cohere multilingual-22-12 hit@3 |
|---|---|---|
| miracl-fa | 44.8 | 53.6 |
| miracl-ja | 49.0 | 61.0 |
| miracl-ko | 50.9 | 64.8 |
| miracl-sw | 61.4 | 74.5 |
| miracl-te | 67.8 | 72.3 |
| miracl-th | 60.2 | 71.9 |
| miracl-yo | 56.4 | 62.2 |
| miracl-zh | 43.8 | 56.5 |
| **Avg** | 54.3 | 64.6 |
|
Nexdata/American_English_Speech_Data_by_Mobile_Phone | ---
YAML tags:
- copy-paste the tags obtained with the tagging app: https://github.com/huggingface/datasets-tagging
---
# Dataset Card for Nexdata/American_English_Speech_Data_by_Mobile_Phone
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://www.nexdata.ai/datasets/999?source=Huggingface
- **Repository:**
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
1,842 native American English speakers participated in the recording, with authentic accents. The recording script was designed by linguists, based on real-world scenes, and covers a wide range of topics, including generic, interactive, on-board, and home scenarios. The text was manually proofread with high accuracy. The recordings were made with mainstream Android and Apple phones.
For more details, please refer to the link: https://www.nexdata.ai/datasets/999?source=Huggingface
### Supported Tasks and Leaderboards
automatic-speech-recognition, audio-speaker-identification: The dataset can be used to train a model for Automatic Speech Recognition (ASR).
### Languages
American English
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
Commercial License: https://drive.google.com/file/d/1saDCPm74D4UWfBL17VbkTsZLGfpOQj1J/view?usp=sharing
### Citation Information
[More Information Needed]
### Contributions
|
graphistry/disaster-headline-relevancy | ---
dataset_info:
features:
- name: news
dtype: string
- name: label
dtype: string
splits:
- name: train
num_bytes: 16780.4
num_examples: 210
- name: test
num_bytes: 7191.6
num_examples: 90
download_size: 22111
dataset_size: 23972.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2 | ---
pretty_name: Evaluation run of yanolja/KoSOLAR-10.7B-v0.2
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [yanolja/KoSOLAR-10.7B-v0.2](https://huggingface.co/yanolja/KoSOLAR-10.7B-v0.2)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-02-01T17:12:29.578851](https://huggingface.co/datasets/open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2/blob/main/results_2024-02-01T17-12-29.578851.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6465773677032779,\n\
\ \"acc_stderr\": 0.0317513460109556,\n \"acc_norm\": 0.6508843032597929,\n\
\ \"acc_norm_stderr\": 0.03238458854956254,\n \"mc1\": 0.3268053855569155,\n\
\ \"mc1_stderr\": 0.016419874731135032,\n \"mc2\": 0.4794002097808169,\n\
\ \"mc2_stderr\": 0.015071913407180176\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5597269624573379,\n \"acc_stderr\": 0.014506769524804241,\n\
\ \"acc_norm\": 0.613481228668942,\n \"acc_norm_stderr\": 0.014230084761910474\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.634833698466441,\n\
\ \"acc_stderr\": 0.0048049276087731236,\n \"acc_norm\": 0.8263294164509062,\n\
\ \"acc_norm_stderr\": 0.0037805175193024905\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \
\ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n\
\ \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n\
\ \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.7302631578947368,\n \"acc_stderr\": 0.03611780560284898,\n\
\ \"acc_norm\": 0.7302631578947368,\n \"acc_norm_stderr\": 0.03611780560284898\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.69,\n\
\ \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \
\ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322663,\n\
\ \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322663\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n\
\ \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n\
\ \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \
\ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\
: 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n\
\ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \
\ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n\
\ \"acc_stderr\": 0.03714325906302064,\n \"acc_norm\": 0.6127167630057804,\n\
\ \"acc_norm_stderr\": 0.03714325906302064\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.047240073523838876,\n\
\ \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.047240073523838876\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n\
\ \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146268,\n\
\ \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146268\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n\
\ \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \
\ \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n\
\ \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.4576719576719577,\n \"acc_stderr\": 0.02565886886205834,\n \"\
acc_norm\": 0.4576719576719577,\n \"acc_norm_stderr\": 0.02565886886205834\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n\
\ \"acc_stderr\": 0.044444444444444495,\n \"acc_norm\": 0.4444444444444444,\n\
\ \"acc_norm_stderr\": 0.044444444444444495\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \
\ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n\
\ \"acc_stderr\": 0.02366421667164251,\n \"acc_norm\": 0.7774193548387097,\n\
\ \"acc_norm_stderr\": 0.02366421667164251\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n\
\ \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\"\
: 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047709,\n\
\ \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047709\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.8282828282828283,\n \"acc_stderr\": 0.02686971618742991,\n \"\
acc_norm\": 0.8282828282828283,\n \"acc_norm_stderr\": 0.02686971618742991\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n\
\ \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6230769230769231,\n \"acc_stderr\": 0.024570975364225995,\n\
\ \"acc_norm\": 0.6230769230769231,\n \"acc_norm_stderr\": 0.024570975364225995\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.3296296296296296,\n \"acc_stderr\": 0.02866120111652458,\n \
\ \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.02866120111652458\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6596638655462185,\n \"acc_stderr\": 0.030778057422931673,\n\
\ \"acc_norm\": 0.6596638655462185,\n \"acc_norm_stderr\": 0.030778057422931673\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"\
acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"\
acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.6018518518518519,\n \"acc_stderr\": 0.033384734032074016,\n \"\
acc_norm\": 0.6018518518518519,\n \"acc_norm_stderr\": 0.033384734032074016\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553346,\n \"\
acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553346\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8438818565400844,\n \"acc_stderr\": 0.023627159460318674,\n \
\ \"acc_norm\": 0.8438818565400844,\n \"acc_norm_stderr\": 0.023627159460318674\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.726457399103139,\n\
\ \"acc_stderr\": 0.029918586707798824,\n \"acc_norm\": 0.726457399103139,\n\
\ \"acc_norm_stderr\": 0.029918586707798824\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n\
\ \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097654,\n \"\
acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097654\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n\
\ \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n\
\ \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.03462419931615623,\n\
\ \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.03462419931615623\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n\
\ \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n\
\ \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n\
\ \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n\
\ \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n\
\ \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \
\ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8160919540229885,\n\
\ \"acc_stderr\": 0.013853724170922526,\n \"acc_norm\": 0.8160919540229885,\n\
\ \"acc_norm_stderr\": 0.013853724170922526\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7254335260115607,\n \"acc_stderr\": 0.02402774515526502,\n\
\ \"acc_norm\": 0.7254335260115607,\n \"acc_norm_stderr\": 0.02402774515526502\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2569832402234637,\n\
\ \"acc_stderr\": 0.014614465821966344,\n \"acc_norm\": 0.2569832402234637,\n\
\ \"acc_norm_stderr\": 0.014614465821966344\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.024288619466046095,\n\
\ \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.024288619466046095\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n\
\ \"acc_stderr\": 0.02575586592263295,\n \"acc_norm\": 0.7106109324758842,\n\
\ \"acc_norm_stderr\": 0.02575586592263295\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600713002,\n\
\ \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600713002\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \
\ \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.48239895697522817,\n\
\ \"acc_stderr\": 0.012762321298823641,\n \"acc_norm\": 0.48239895697522817,\n\
\ \"acc_norm_stderr\": 0.012762321298823641\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.7132352941176471,\n \"acc_stderr\": 0.027472274473233818,\n\
\ \"acc_norm\": 0.7132352941176471,\n \"acc_norm_stderr\": 0.027472274473233818\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.684640522875817,\n \"acc_stderr\": 0.018798086284886887,\n \
\ \"acc_norm\": 0.684640522875817,\n \"acc_norm_stderr\": 0.018798086284886887\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\
\ \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n\
\ \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.763265306122449,\n \"acc_stderr\": 0.027212835884073153,\n\
\ \"acc_norm\": 0.763265306122449,\n \"acc_norm_stderr\": 0.027212835884073153\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n\
\ \"acc_stderr\": 0.02519692987482708,\n \"acc_norm\": 0.8507462686567164,\n\
\ \"acc_norm_stderr\": 0.02519692987482708\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.93,\n \"acc_stderr\": 0.025643239997624294,\n \
\ \"acc_norm\": 0.93,\n \"acc_norm_stderr\": 0.025643239997624294\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\
\ \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n\
\ \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.030944459778533207,\n\
\ \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.030944459778533207\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3268053855569155,\n\
\ \"mc1_stderr\": 0.016419874731135032,\n \"mc2\": 0.4794002097808169,\n\
\ \"mc2_stderr\": 0.015071913407180176\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8074191002367798,\n \"acc_stderr\": 0.01108253884749191\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.47687642153146326,\n \
\ \"acc_stderr\": 0.013757748544245323\n }\n}\n```"
repo_url: https://huggingface.co/yanolja/KoSOLAR-10.7B-v0.2
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|arc:challenge|25_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|gsm8k|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hellaswag|10_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-01T17-12-29.578851.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-01T17-12-29.578851.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- '**/details_harness|winogrande|5_2024-02-01T17-12-29.578851.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-01T17-12-29.578851.parquet'
- config_name: results
data_files:
- split: 2024_02_01T17_12_29.578851
path:
- results_2024-02-01T17-12-29.578851.parquet
- split: latest
path:
- results_2024-02-01T17-12-29.578851.parquet
---
# Dataset Card for Evaluation run of yanolja/KoSOLAR-10.7B-v0.2
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [yanolja/KoSOLAR-10.7B-v0.2](https://huggingface.co/yanolja/KoSOLAR-10.7B-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2",
	"harness_winogrande_5",
	split="latest")
```
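The aggregated metrics can be loaded the same way through the "results" configuration defined above (a minimal sketch; the exact columns of each row may vary between harness versions):
```python
from datasets import load_dataset

# Aggregated metrics for this model; the "latest" split points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2",
    "results",
    split="latest",
)
print(results[0])  # one row with the aggregated metrics of the latest run
```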
## Latest results
These are the [latest results from run 2024-02-01T17:12:29.578851](https://huggingface.co/datasets/open-llm-leaderboard/details_yanolja__KoSOLAR-10.7B-v0.2/blob/main/results_2024-02-01T17-12-29.578851.json) (note that there might be results for other tasks in the repository if successive evaluations did not cover the same tasks; you can find each task's results in its own configuration, under the "latest" split):
```python
{
"all": {
"acc": 0.6465773677032779,
"acc_stderr": 0.0317513460109556,
"acc_norm": 0.6508843032597929,
"acc_norm_stderr": 0.03238458854956254,
"mc1": 0.3268053855569155,
"mc1_stderr": 0.016419874731135032,
"mc2": 0.4794002097808169,
"mc2_stderr": 0.015071913407180176
},
"harness|arc:challenge|25": {
"acc": 0.5597269624573379,
"acc_stderr": 0.014506769524804241,
"acc_norm": 0.613481228668942,
"acc_norm_stderr": 0.014230084761910474
},
"harness|hellaswag|10": {
"acc": 0.634833698466441,
"acc_stderr": 0.0048049276087731236,
"acc_norm": 0.8263294164509062,
"acc_norm_stderr": 0.0037805175193024905
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.27,
"acc_stderr": 0.04461960433384741,
"acc_norm": 0.27,
"acc_norm_stderr": 0.04461960433384741
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5555555555555556,
"acc_stderr": 0.04292596718256981,
"acc_norm": 0.5555555555555556,
"acc_norm_stderr": 0.04292596718256981
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.7302631578947368,
"acc_stderr": 0.03611780560284898,
"acc_norm": 0.7302631578947368,
"acc_norm_stderr": 0.03611780560284898
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.69,
"acc_stderr": 0.04648231987117316,
"acc_norm": 0.69,
"acc_norm_stderr": 0.04648231987117316
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6943396226415094,
"acc_stderr": 0.028353298073322663,
"acc_norm": 0.6943396226415094,
"acc_norm_stderr": 0.028353298073322663
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7569444444444444,
"acc_stderr": 0.0358687928008034,
"acc_norm": 0.7569444444444444,
"acc_norm_stderr": 0.0358687928008034
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.44,
"acc_stderr": 0.04988876515698589,
"acc_norm": 0.44,
"acc_norm_stderr": 0.04988876515698589
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.54,
"acc_stderr": 0.05009082659620332,
"acc_norm": 0.54,
"acc_norm_stderr": 0.05009082659620332
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.36,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.36,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6127167630057804,
"acc_stderr": 0.03714325906302064,
"acc_norm": 0.6127167630057804,
"acc_norm_stderr": 0.03714325906302064
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.3431372549019608,
"acc_stderr": 0.047240073523838876,
"acc_norm": 0.3431372549019608,
"acc_norm_stderr": 0.047240073523838876
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.76,
"acc_stderr": 0.04292346959909281,
"acc_norm": 0.76,
"acc_norm_stderr": 0.04292346959909281
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5787234042553191,
"acc_stderr": 0.03227834510146268,
"acc_norm": 0.5787234042553191,
"acc_norm_stderr": 0.03227834510146268
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5,
"acc_stderr": 0.047036043419179864,
"acc_norm": 0.5,
"acc_norm_stderr": 0.047036043419179864
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5862068965517241,
"acc_stderr": 0.04104269211806232,
"acc_norm": 0.5862068965517241,
"acc_norm_stderr": 0.04104269211806232
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.4576719576719577,
"acc_stderr": 0.02565886886205834,
"acc_norm": 0.4576719576719577,
"acc_norm_stderr": 0.02565886886205834
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.4444444444444444,
"acc_stderr": 0.044444444444444495,
"acc_norm": 0.4444444444444444,
"acc_norm_stderr": 0.044444444444444495
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.38,
"acc_stderr": 0.04878317312145632,
"acc_norm": 0.38,
"acc_norm_stderr": 0.04878317312145632
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7774193548387097,
"acc_stderr": 0.02366421667164251,
"acc_norm": 0.7774193548387097,
"acc_norm_stderr": 0.02366421667164251
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.49261083743842365,
"acc_stderr": 0.03517603540361008,
"acc_norm": 0.49261083743842365,
"acc_norm_stderr": 0.03517603540361008
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.793939393939394,
"acc_stderr": 0.03158415324047709,
"acc_norm": 0.793939393939394,
"acc_norm_stderr": 0.03158415324047709
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.8282828282828283,
"acc_stderr": 0.02686971618742991,
"acc_norm": 0.8282828282828283,
"acc_norm_stderr": 0.02686971618742991
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8963730569948186,
"acc_stderr": 0.02199531196364424,
"acc_norm": 0.8963730569948186,
"acc_norm_stderr": 0.02199531196364424
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6230769230769231,
"acc_stderr": 0.024570975364225995,
"acc_norm": 0.6230769230769231,
"acc_norm_stderr": 0.024570975364225995
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.3296296296296296,
"acc_stderr": 0.02866120111652458,
"acc_norm": 0.3296296296296296,
"acc_norm_stderr": 0.02866120111652458
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6596638655462185,
"acc_stderr": 0.030778057422931673,
"acc_norm": 0.6596638655462185,
"acc_norm_stderr": 0.030778057422931673
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.36423841059602646,
"acc_stderr": 0.03929111781242741,
"acc_norm": 0.36423841059602646,
"acc_norm_stderr": 0.03929111781242741
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8366972477064221,
"acc_stderr": 0.01584825580650155,
"acc_norm": 0.8366972477064221,
"acc_norm_stderr": 0.01584825580650155
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.6018518518518519,
"acc_stderr": 0.033384734032074016,
"acc_norm": 0.6018518518518519,
"acc_norm_stderr": 0.033384734032074016
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8431372549019608,
"acc_stderr": 0.025524722324553346,
"acc_norm": 0.8431372549019608,
"acc_norm_stderr": 0.025524722324553346
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8438818565400844,
"acc_stderr": 0.023627159460318674,
"acc_norm": 0.8438818565400844,
"acc_norm_stderr": 0.023627159460318674
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.726457399103139,
"acc_stderr": 0.029918586707798824,
"acc_norm": 0.726457399103139,
"acc_norm_stderr": 0.029918586707798824
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7709923664122137,
"acc_stderr": 0.036853466317118506,
"acc_norm": 0.7709923664122137,
"acc_norm_stderr": 0.036853466317118506
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.8181818181818182,
"acc_stderr": 0.03520893951097654,
"acc_norm": 0.8181818181818182,
"acc_norm_stderr": 0.03520893951097654
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7962962962962963,
"acc_stderr": 0.03893542518824847,
"acc_norm": 0.7962962962962963,
"acc_norm_stderr": 0.03893542518824847
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7361963190184049,
"acc_stderr": 0.03462419931615623,
"acc_norm": 0.7361963190184049,
"acc_norm_stderr": 0.03462419931615623
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.44642857142857145,
"acc_stderr": 0.04718471485219588,
"acc_norm": 0.44642857142857145,
"acc_norm_stderr": 0.04718471485219588
},
"harness|hendrycksTest-management|5": {
"acc": 0.8058252427184466,
"acc_stderr": 0.03916667762822584,
"acc_norm": 0.8058252427184466,
"acc_norm_stderr": 0.03916667762822584
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.021262719400406964,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.021262719400406964
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.72,
"acc_stderr": 0.045126085985421276,
"acc_norm": 0.72,
"acc_norm_stderr": 0.045126085985421276
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8160919540229885,
"acc_stderr": 0.013853724170922526,
"acc_norm": 0.8160919540229885,
"acc_norm_stderr": 0.013853724170922526
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7254335260115607,
"acc_stderr": 0.02402774515526502,
"acc_norm": 0.7254335260115607,
"acc_norm_stderr": 0.02402774515526502
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2569832402234637,
"acc_stderr": 0.014614465821966344,
"acc_norm": 0.2569832402234637,
"acc_norm_stderr": 0.014614465821966344
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7647058823529411,
"acc_stderr": 0.024288619466046095,
"acc_norm": 0.7647058823529411,
"acc_norm_stderr": 0.024288619466046095
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7106109324758842,
"acc_stderr": 0.02575586592263295,
"acc_norm": 0.7106109324758842,
"acc_norm_stderr": 0.02575586592263295
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7469135802469136,
"acc_stderr": 0.024191808600713002,
"acc_norm": 0.7469135802469136,
"acc_norm_stderr": 0.024191808600713002
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.5035460992907801,
"acc_stderr": 0.02982674915328092,
"acc_norm": 0.5035460992907801,
"acc_norm_stderr": 0.02982674915328092
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.48239895697522817,
"acc_stderr": 0.012762321298823641,
"acc_norm": 0.48239895697522817,
"acc_norm_stderr": 0.012762321298823641
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.7132352941176471,
"acc_stderr": 0.027472274473233818,
"acc_norm": 0.7132352941176471,
"acc_norm_stderr": 0.027472274473233818
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.684640522875817,
"acc_stderr": 0.018798086284886887,
"acc_norm": 0.684640522875817,
"acc_norm_stderr": 0.018798086284886887
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.763265306122449,
"acc_stderr": 0.027212835884073153,
"acc_norm": 0.763265306122449,
"acc_norm_stderr": 0.027212835884073153
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8507462686567164,
"acc_stderr": 0.02519692987482708,
"acc_norm": 0.8507462686567164,
"acc_norm_stderr": 0.02519692987482708
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.93,
"acc_stderr": 0.025643239997624294,
"acc_norm": 0.93,
"acc_norm_stderr": 0.025643239997624294
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5542168674698795,
"acc_stderr": 0.038695433234721015,
"acc_norm": 0.5542168674698795,
"acc_norm_stderr": 0.038695433234721015
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.7953216374269005,
"acc_stderr": 0.030944459778533207,
"acc_norm": 0.7953216374269005,
"acc_norm_stderr": 0.030944459778533207
},
"harness|truthfulqa:mc|0": {
"mc1": 0.3268053855569155,
"mc1_stderr": 0.016419874731135032,
"mc2": 0.4794002097808169,
"mc2_stderr": 0.015071913407180176
},
"harness|winogrande|5": {
"acc": 0.8074191002367798,
"acc_stderr": 0.01108253884749191
},
"harness|gsm8k|5": {
"acc": 0.47687642153146326,
"acc_stderr": 0.013757748544245323
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
Gummybear05/EY_speed | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
dataset_info:
features:
- name: audio
struct:
- name: array
sequence: float64
- name: path
dtype: string
- name: sample_rate
dtype: int64
- name: text
dtype: string
- name: scriptId
dtype: int64
- name: fileNm
dtype: string
- name: recrdTime
dtype: float64
- name: recrdQuality
dtype: int64
- name: recrdDt
dtype: string
- name: scriptSetNo
dtype: string
- name: recrdEnvrn
dtype: string
- name: colctUnitCode
dtype: string
- name: cityCode
dtype: string
- name: recrdUnit
dtype: string
- name: convrsThema
dtype: string
- name: gender
dtype: string
- name: recorderId
dtype: string
- name: age
dtype: int64
splits:
- name: train
num_bytes: 4865314660
num_examples: 5400
download_size: 2492360968
dataset_size: 4865314660
---
# Dataset Card for "EY_speed"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Iker/NoticIA_Human_Validation | ---
language:
- es
license: cc-by-nc-sa-4.0
multilinguality:
- monolingual
size_categories:
- n<1K
source_datasets:
- original
task_categories:
- summarization
pretty_name: NoticIA Human Validation
dataset_info:
features:
- name: web_url
dtype: string
- name: web_headline
dtype: string
- name: summary
dtype: string
- name: summary2
dtype: string
- name: web_text
dtype: string
splits:
- name: test
num_examples: 100
configs:
- config_name: default
data_files:
- split: test
path: test.jsonl
tags:
- summarization
- clickbait
- news
---
<p align="center">
<img src="https://huggingface.co/datasets/Iker/NoticIA/resolve/main/assets/logo.png" style="height: 250px;">
</p>
<h3 align="center">"A Clickbait Article Summarization Dataset in Spanish."</h3>
This repository contains the manual annotations from a second human to validate the test set of the NoticIA dataset.
The full NoticIA dataset is available here: [https://huggingface.co/datasets/Iker/NoticIA](https://huggingface.co/datasets/Iker/NoticIA)
# Data explanation
- **web_url** (str): The URL of the news article.
- **web_headline** (str): The headline of the article, which is a clickbait headline.
- **summary** (str): The original summary in the NoticIA dataset.
- **summary2** (str): The second summary, written by another human to validate the quality of `summary`.
- **web_text** (str): The body of the article (a short loading sketch follows this list).
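A minimal sketch for inspecting these fields, assuming only the `test` split defined in this repository:
```Python
from datasets import load_dataset

# Load the human-validation split and look at one annotated example.
dataset = load_dataset("Iker/NoticIA_Human_Validation", split="test")

example = dataset[0]
print(example["web_headline"])  # the clickbait headline
print(example["summary"])       # original NoticIA summary
print(example["summary2"])      # second human-written summary
print(example["web_url"])       # source article URL
```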
# Dataset Description
- **Curated by:** [Iker García-Ferrero](https://ikergarcia1996.github.io/Iker-Garcia-Ferrero/), [Begoña Altuna](https://www.linkedin.com/in/bego%C3%B1a-altuna-78014139)
- **Language(s) (NLP):** Spanish
- **License:** apache-2.0
# Dataset Usage
```Python
# pip install datasets evaluate rouge-score
from datasets import load_dataset
from evaluate import load
dataset = load_dataset("Iker/NoticIA_Human_Validation", split="test")
rouge = load("rouge")
results = rouge.compute(
predictions=[x["summary2"] for x in dataset],
references=[[x["summary"]] for x in dataset],
use_aggregator=True,
)
print(results)
```
# Uses
This dataset is intended for building models, tailored to academic research, that can extract information from long texts. The objective is to research whether current LLMs, given a question formulated as a clickbait headline, can locate the answer within the article body and summarize the information in a few words. The dataset also aims to serve as a task for evaluating the performance of current LLMs in Spanish.
# Out-of-Scope Use
You cannot use this dataset to develop systems that directly harm the newspapers included in the dataset. This includes using the dataset to train profit-oriented LLMs capable of generating articles from a short text or headline, as well as developing profit-oriented bots that automatically summarize articles without the permission of the article's owner. Additionally, you are not permitted to train a system with this dataset that generates clickbait headlines.
This dataset contains text and headlines from newspapers; therefore, you cannot use it for commercial purposes unless you have the license for the data.
# Dataset Creation
The dataset has been meticulously created by hand. We utilize two sources to compile Clickbait articles:
- The Twitter user [@ahorrandoclick1](https://twitter.com/ahorrandoclick1), who reposts Clickbait articles along with a hand-crafted summary. Although we use their summaries as a reference, most of them have been rewritten (750 examples from this source).
- The web demo [ClickbaitFighter](https://iker-clickbaitfighter.hf.space/), which operates a pre-trained model using an early iteration of our dataset. We collect all the model inputs/outputs and manually correct them (100 examples from this source).
# Who are the annotators?
The dataset was originally annotated by [Iker García-Ferrero](https://ikergarcia1996.github.io/Iker-Garcia-Ferrero/) and has been validated by [Begoña Altuna](https://www.linkedin.com/in/bego%C3%B1a-altuna-78014139).
The annotation took ~40 hours.
# Citation
```bibtex
@misc{noticia2024,
title={NoticIA: A Clickbait Article Summarization Dataset in Spanish},
author={Iker García-Ferrero and Begoña Altuna},
year={2024},
eprint={2404.07611},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
pa-shk/sberquad-tiny | ---
dataset_info:
- config_name: docs
features:
- name: doc
dtype: string
splits:
- name: train
num_bytes: 14879284
num_examples: 10858
download_size: 7808166
dataset_size: 14879284
- config_name: qrels
features:
- name: query
dtype: string
- name: relevant_docs
sequence: int64
splits:
- name: train
num_bytes: 1216392
num_examples: 9066
- name: validation
num_bytes: 133684
num_examples: 1008
- name: test
num_bytes: 324627
num_examples: 2394
download_size: 914235
dataset_size: 1674703
configs:
- config_name: docs
data_files:
- split: train
path: docs/train-*
- config_name: qrels
data_files:
- split: train
path: qrels/train-*
- split: validation
path: qrels/validation-*
- split: test
path: qrels/test-*
---
|
open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B | ---
pretty_name: Evaluation run of grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B](https://huggingface.co/grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 63 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2024-03-10T16:52:41.232244](https://huggingface.co/datasets/open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B/blob/main/results_2024-03-10T16-52-41.232244.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6521245844299478,\n\
\ \"acc_stderr\": 0.03206483289505714,\n \"acc_norm\": 0.6525873691124378,\n\
\ \"acc_norm_stderr\": 0.03271832415925097,\n \"mc1\": 0.4908200734394125,\n\
\ \"mc1_stderr\": 0.01750055072481975,\n \"mc2\": 0.6512373349905823,\n\
\ \"mc2_stderr\": 0.015414384395752878\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.6646757679180887,\n \"acc_stderr\": 0.01379618294778556,\n\
\ \"acc_norm\": 0.6911262798634812,\n \"acc_norm_stderr\": 0.013501770929344003\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7005576578370842,\n\
\ \"acc_stderr\": 0.004570777326263903,\n \"acc_norm\": 0.8733320055765784,\n\
\ \"acc_norm_stderr\": 0.003319209400135123\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\
\ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n\
\ \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n\
\ \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6776315789473685,\n \"acc_stderr\": 0.03803510248351585,\n\
\ \"acc_norm\": 0.6776315789473685,\n \"acc_norm_stderr\": 0.03803510248351585\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\
\ \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \
\ \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569526,\n\
\ \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569526\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n\
\ \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n\
\ \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\"\
: 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\"\
: {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \
\ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \
\ },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n\
\ \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \
\ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\"\
: {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n\
\ \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n\
\ },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n\
\ \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n\
\ \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\"\
: {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \
\ \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816506\n \
\ },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n\
\ \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.5957446808510638,\n\
\ \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\"\
: {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n\
\ \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n\
\ },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\"\
: 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"\
acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3968253968253968,\n \"acc_stderr\": 0.02519710107424649,\n \"\
acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.02519710107424649\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n\
\ \"acc_stderr\": 0.04463112720677171,\n \"acc_norm\": 0.46825396825396826,\n\
\ \"acc_norm_stderr\": 0.04463112720677171\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \
\ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\
: 0.7967741935483871,\n \"acc_stderr\": 0.022891687984554963,\n \"\
acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.022891687984554963\n\
\ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\
: 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"\
acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\"\
: 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\
\ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"\
acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033484,\n\
\ \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033484\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \
\ \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.34074074074074073,\n \"acc_stderr\": 0.028897748741131154,\n \
\ \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.028897748741131154\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634335,\n\
\ \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634335\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"\
acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669237,\n \"\
acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669237\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"\
acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"\
acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.8016877637130801,\n \"acc_stderr\": 0.025955020841621112,\n \
\ \"acc_norm\": 0.8016877637130801,\n \"acc_norm_stderr\": 0.025955020841621112\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n\
\ \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n\
\ \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596913,\n\
\ \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596913\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\"\
: 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n\
\ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n\
\ \"acc_stderr\": 0.03755265865037181,\n \"acc_norm\": 0.8148148148148148,\n\
\ \"acc_norm_stderr\": 0.03755265865037181\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7975460122699386,\n \"acc_stderr\": 0.031570650789119005,\n\
\ \"acc_norm\": 0.7975460122699386,\n \"acc_norm_stderr\": 0.031570650789119005\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n\
\ \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n\
\ \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n\
\ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n\
\ \"acc_stderr\": 0.022509033937077802,\n \"acc_norm\": 0.8632478632478633,\n\
\ \"acc_norm_stderr\": 0.022509033937077802\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \
\ \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8352490421455939,\n\
\ \"acc_stderr\": 0.013265346261323797,\n \"acc_norm\": 0.8352490421455939,\n\
\ \"acc_norm_stderr\": 0.013265346261323797\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545543,\n\
\ \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545543\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4491620111731844,\n\
\ \"acc_stderr\": 0.016635838341631928,\n \"acc_norm\": 0.4491620111731844,\n\
\ \"acc_norm_stderr\": 0.016635838341631928\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.02555316999182652,\n\
\ \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.02555316999182652\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n\
\ \"acc_stderr\": 0.026082700695399665,\n \"acc_norm\": 0.6977491961414791,\n\
\ \"acc_norm_stderr\": 0.026082700695399665\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7438271604938271,\n \"acc_stderr\": 0.024288533637726095,\n\
\ \"acc_norm\": 0.7438271604938271,\n \"acc_norm_stderr\": 0.024288533637726095\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \
\ \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n\
\ \"acc_stderr\": 0.012745204626083136,\n \"acc_norm\": 0.46870925684485004,\n\
\ \"acc_norm_stderr\": 0.012745204626083136\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6985294117647058,\n \"acc_stderr\": 0.027875982114273168,\n\
\ \"acc_norm\": 0.6985294117647058,\n \"acc_norm_stderr\": 0.027875982114273168\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \
\ \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\
\ \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n\
\ \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n\
\ \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\
\ \"acc_stderr\": 0.025538433368578327,\n \"acc_norm\": 0.845771144278607,\n\
\ \"acc_norm_stderr\": 0.025538433368578327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \
\ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n\
\ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5301204819277109,\n\
\ \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.5301204819277109,\n\
\ \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070806,\n\
\ \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070806\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4908200734394125,\n\
\ \"mc1_stderr\": 0.01750055072481975,\n \"mc2\": 0.6512373349905823,\n\
\ \"mc2_stderr\": 0.015414384395752878\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.8089976322020521,\n \"acc_stderr\": 0.011047808761510427\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6671721000758151,\n \
\ \"acc_stderr\": 0.012979892496598283\n }\n}\n```"
repo_url: https://huggingface.co/grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|arc:challenge|25_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|arc:challenge|25_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|gsm8k|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|gsm8k|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hellaswag|10_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hellaswag|10_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-47-06.900885.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-52-41.232244.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-03-10T16-52-41.232244.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- '**/details_harness|winogrande|5_2024-03-10T16-47-06.900885.parquet'
- split: 2024_03_10T16_52_41.232244
path:
- '**/details_harness|winogrande|5_2024-03-10T16-52-41.232244.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-03-10T16-52-41.232244.parquet'
- config_name: results
data_files:
- split: 2024_03_10T16_47_06.900885
path:
- results_2024-03-10T16-47-06.900885.parquet
- split: 2024_03_10T16_52_41.232244
path:
- results_2024-03-10T16-52-41.232244.parquet
- split: latest
path:
- results_2024-03-10T16-52-41.232244.parquet
---
# Dataset Card for Evaluation run of grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B](https://huggingface.co/grimjim/kuno-kunoichi-v1-DPO-v2-SLERP-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the most recent results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B",
"harness_winogrande_5",
	split="latest")
```
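You can also load the aggregated metrics, or pin a specific run via its timestamped split. A minimal sketch (the configuration and split names below are taken from the YAML header of this card):
```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B"

# Aggregated metrics from the most recent evaluation run
results = load_dataset(repo, "results", split="latest")

# A single task from a specific run, selected by its timestamped split name
gsm8k_run = load_dataset(repo, "harness_gsm8k_5", split="2024_03_10T16_52_41.232244")
```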
## Latest results
These are the [latest results from run 2024-03-10T16:52:41.232244](https://huggingface.co/datasets/open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B/blob/main/results_2024-03-10T16-52-41.232244.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; each task's results can be found in its own configuration, under the "latest" split):
```python
{
"all": {
"acc": 0.6521245844299478,
"acc_stderr": 0.03206483289505714,
"acc_norm": 0.6525873691124378,
"acc_norm_stderr": 0.03271832415925097,
"mc1": 0.4908200734394125,
"mc1_stderr": 0.01750055072481975,
"mc2": 0.6512373349905823,
"mc2_stderr": 0.015414384395752878
},
"harness|arc:challenge|25": {
"acc": 0.6646757679180887,
"acc_stderr": 0.01379618294778556,
"acc_norm": 0.6911262798634812,
"acc_norm_stderr": 0.013501770929344003
},
"harness|hellaswag|10": {
"acc": 0.7005576578370842,
"acc_stderr": 0.004570777326263903,
"acc_norm": 0.8733320055765784,
"acc_norm_stderr": 0.003319209400135123
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.35,
"acc_stderr": 0.0479372485441102,
"acc_norm": 0.35,
"acc_norm_stderr": 0.0479372485441102
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.6296296296296297,
"acc_stderr": 0.041716541613545426,
"acc_norm": 0.6296296296296297,
"acc_norm_stderr": 0.041716541613545426
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6776315789473685,
"acc_stderr": 0.03803510248351585,
"acc_norm": 0.6776315789473685,
"acc_norm_stderr": 0.03803510248351585
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.049431107042371025,
"acc_norm": 0.59,
"acc_norm_stderr": 0.049431107042371025
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.7056603773584905,
"acc_stderr": 0.02804918631569526,
"acc_norm": 0.7056603773584905,
"acc_norm_stderr": 0.02804918631569526
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.7638888888888888,
"acc_stderr": 0.03551446610810826,
"acc_norm": 0.7638888888888888,
"acc_norm_stderr": 0.03551446610810826
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.45,
"acc_stderr": 0.05,
"acc_norm": 0.45,
"acc_norm_stderr": 0.05
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.5,
"acc_stderr": 0.050251890762960605,
"acc_norm": 0.5,
"acc_norm_stderr": 0.050251890762960605
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.3,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.3,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.6878612716763006,
"acc_stderr": 0.03533133389323657,
"acc_norm": 0.6878612716763006,
"acc_norm_stderr": 0.03533133389323657
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.4411764705882353,
"acc_stderr": 0.049406356306056595,
"acc_norm": 0.4411764705882353,
"acc_norm_stderr": 0.049406356306056595
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.77,
"acc_stderr": 0.04229525846816506,
"acc_norm": 0.77,
"acc_norm_stderr": 0.04229525846816506
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5957446808510638,
"acc_stderr": 0.03208115750788684,
"acc_norm": 0.5957446808510638,
"acc_norm_stderr": 0.03208115750788684
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.49122807017543857,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.49122807017543857,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5379310344827586,
"acc_stderr": 0.04154659671707548,
"acc_norm": 0.5379310344827586,
"acc_norm_stderr": 0.04154659671707548
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.02519710107424649,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.02519710107424649
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.46825396825396826,
"acc_stderr": 0.04463112720677171,
"acc_norm": 0.46825396825396826,
"acc_norm_stderr": 0.04463112720677171
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.35,
"acc_stderr": 0.047937248544110196,
"acc_norm": 0.35,
"acc_norm_stderr": 0.047937248544110196
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7967741935483871,
"acc_stderr": 0.022891687984554963,
"acc_norm": 0.7967741935483871,
"acc_norm_stderr": 0.022891687984554963
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.5024630541871922,
"acc_stderr": 0.035179450386910616,
"acc_norm": 0.5024630541871922,
"acc_norm_stderr": 0.035179450386910616
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7828282828282829,
"acc_stderr": 0.02937661648494563,
"acc_norm": 0.7828282828282829,
"acc_norm_stderr": 0.02937661648494563
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.9015544041450777,
"acc_stderr": 0.021500249576033484,
"acc_norm": 0.9015544041450777,
"acc_norm_stderr": 0.021500249576033484
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6743589743589744,
"acc_stderr": 0.02375966576741229,
"acc_norm": 0.6743589743589744,
"acc_norm_stderr": 0.02375966576741229
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.34074074074074073,
"acc_stderr": 0.028897748741131154,
"acc_norm": 0.34074074074074073,
"acc_norm_stderr": 0.028897748741131154
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.7142857142857143,
"acc_stderr": 0.029344572500634335,
"acc_norm": 0.7142857142857143,
"acc_norm_stderr": 0.029344572500634335
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3708609271523179,
"acc_stderr": 0.03943966699183629,
"acc_norm": 0.3708609271523179,
"acc_norm_stderr": 0.03943966699183629
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.8495412844036697,
"acc_stderr": 0.015328563932669237,
"acc_norm": 0.8495412844036697,
"acc_norm_stderr": 0.015328563932669237
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5231481481481481,
"acc_stderr": 0.03406315360711507,
"acc_norm": 0.5231481481481481,
"acc_norm_stderr": 0.03406315360711507
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.8382352941176471,
"acc_stderr": 0.025845017986926917,
"acc_norm": 0.8382352941176471,
"acc_norm_stderr": 0.025845017986926917
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.8016877637130801,
"acc_stderr": 0.025955020841621112,
"acc_norm": 0.8016877637130801,
"acc_norm_stderr": 0.025955020841621112
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.695067264573991,
"acc_stderr": 0.030898610882477515,
"acc_norm": 0.695067264573991,
"acc_norm_stderr": 0.030898610882477515
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7633587786259542,
"acc_stderr": 0.03727673575596913,
"acc_norm": 0.7633587786259542,
"acc_norm_stderr": 0.03727673575596913
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.768595041322314,
"acc_stderr": 0.03849856098794088,
"acc_norm": 0.768595041322314,
"acc_norm_stderr": 0.03849856098794088
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.8148148148148148,
"acc_stderr": 0.03755265865037181,
"acc_norm": 0.8148148148148148,
"acc_norm_stderr": 0.03755265865037181
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7975460122699386,
"acc_stderr": 0.031570650789119005,
"acc_norm": 0.7975460122699386,
"acc_norm_stderr": 0.031570650789119005
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.45535714285714285,
"acc_stderr": 0.047268355537191,
"acc_norm": 0.45535714285714285,
"acc_norm_stderr": 0.047268355537191
},
"harness|hendrycksTest-management|5": {
"acc": 0.7864077669902912,
"acc_stderr": 0.040580420156460344,
"acc_norm": 0.7864077669902912,
"acc_norm_stderr": 0.040580420156460344
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8632478632478633,
"acc_stderr": 0.022509033937077802,
"acc_norm": 0.8632478632478633,
"acc_norm_stderr": 0.022509033937077802
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.73,
"acc_stderr": 0.044619604333847394,
"acc_norm": 0.73,
"acc_norm_stderr": 0.044619604333847394
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8352490421455939,
"acc_stderr": 0.013265346261323797,
"acc_norm": 0.8352490421455939,
"acc_norm_stderr": 0.013265346261323797
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.7456647398843931,
"acc_stderr": 0.023445826276545543,
"acc_norm": 0.7456647398843931,
"acc_norm_stderr": 0.023445826276545543
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.4491620111731844,
"acc_stderr": 0.016635838341631928,
"acc_norm": 0.4491620111731844,
"acc_norm_stderr": 0.016635838341631928
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7254901960784313,
"acc_stderr": 0.02555316999182652,
"acc_norm": 0.7254901960784313,
"acc_norm_stderr": 0.02555316999182652
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.6977491961414791,
"acc_stderr": 0.026082700695399665,
"acc_norm": 0.6977491961414791,
"acc_norm_stderr": 0.026082700695399665
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7438271604938271,
"acc_stderr": 0.024288533637726095,
"acc_norm": 0.7438271604938271,
"acc_norm_stderr": 0.024288533637726095
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4929078014184397,
"acc_stderr": 0.02982449855912901,
"acc_norm": 0.4929078014184397,
"acc_norm_stderr": 0.02982449855912901
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.46870925684485004,
"acc_stderr": 0.012745204626083136,
"acc_norm": 0.46870925684485004,
"acc_norm_stderr": 0.012745204626083136
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6985294117647058,
"acc_stderr": 0.027875982114273168,
"acc_norm": 0.6985294117647058,
"acc_norm_stderr": 0.027875982114273168
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6535947712418301,
"acc_stderr": 0.01924978569171721,
"acc_norm": 0.6535947712418301,
"acc_norm_stderr": 0.01924978569171721
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6727272727272727,
"acc_stderr": 0.0449429086625209,
"acc_norm": 0.6727272727272727,
"acc_norm_stderr": 0.0449429086625209
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7306122448979592,
"acc_stderr": 0.02840125202902294,
"acc_norm": 0.7306122448979592,
"acc_norm_stderr": 0.02840125202902294
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.845771144278607,
"acc_stderr": 0.025538433368578327,
"acc_norm": 0.845771144278607,
"acc_norm_stderr": 0.025538433368578327
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.0358870281282637,
"acc_norm": 0.85,
"acc_norm_stderr": 0.0358870281282637
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5301204819277109,
"acc_stderr": 0.03885425420866767,
"acc_norm": 0.5301204819277109,
"acc_norm_stderr": 0.03885425420866767
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8538011695906432,
"acc_stderr": 0.027097290118070806,
"acc_norm": 0.8538011695906432,
"acc_norm_stderr": 0.027097290118070806
},
"harness|truthfulqa:mc|0": {
"mc1": 0.4908200734394125,
"mc1_stderr": 0.01750055072481975,
"mc2": 0.6512373349905823,
"mc2_stderr": 0.015414384395752878
},
"harness|winogrande|5": {
"acc": 0.8089976322020521,
"acc_stderr": 0.011047808761510427
},
"harness|gsm8k|5": {
"acc": 0.6671721000758151,
"acc_stderr": 0.012979892496598283
}
}
```
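To work with these figures programmatically, one option is to download the results file linked above and aggregate the MMLU sub-tasks. A minimal sketch (it assumes the per-task entries appear either at the top level, as in the snippet above, or under a top-level "results" key):
```python
import json

from huggingface_hub import hf_hub_download

# Fetch the results file referenced above from the dataset repository
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_grimjim__kuno-kunoichi-v1-DPO-v2-SLERP-7B",
    filename="results_2024-03-10T16-52-41.232244.json",
    repo_type="dataset",
)
with open(path) as f:
    data = json.load(f)

# Per-task scores: either the whole file (as in the snippet) or nested under "results"
tasks = data.get("results", data)

# Macro-average accuracy over the MMLU (hendrycksTest) sub-tasks
mmlu = [v["acc"] for k, v in tasks.items() if k.startswith("harness|hendrycksTest-")]
print(f"MMLU 5-shot macro-average accuracy: {sum(mmlu) / len(mmlu):.4f}")
```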
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
open-llm-leaderboard/details_digitous__Javelin-R | ---
pretty_name: Evaluation run of digitous/Javelin-R
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [digitous/Javelin-R](https://huggingface.co/digitous/Javelin-R) on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
  \nThe dataset is composed of 64 configurations, each one corresponding to one of the\
  \ evaluated tasks.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
  \ timestamp of the run. The \"train\" split always points to the latest results.\n\
  \nAn additional configuration \"results\" stores all the aggregated results of the\
  \ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_digitous__Javelin-R\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
  These are the [latest results from run 2023-10-17T16:47:23.562896](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-R/blob/main/results_2023-10-17T16-47-23.562896.json) (note\
  \ that there might be results for other tasks in the repository if successive evals didn't\
  \ cover the same tasks. You can find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0010486577181208054,\n\
\ \"em_stderr\": 0.0003314581465219256,\n \"f1\": 0.05006606543624186,\n\
\ \"f1_stderr\": 0.001221286433761839,\n \"acc\": 0.3323333371230866,\n\
\ \"acc_stderr\": 0.008475235356826075\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.0010486577181208054,\n \"em_stderr\": 0.0003314581465219256,\n\
\ \"f1\": 0.05006606543624186,\n \"f1_stderr\": 0.001221286433761839\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.016679302501895376,\n \
\ \"acc_stderr\": 0.0035275958887224313\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.6479873717442778,\n \"acc_stderr\": 0.01342287482492972\n\
\ }\n}\n```"
repo_url: https://huggingface.co/digitous/Javelin-R
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|arc:challenge|25_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_10_17T16_47_23.562896
path:
- '**/details_harness|drop|3_2023-10-17T16-47-23.562896.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-10-17T16-47-23.562896.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_10_17T16_47_23.562896
path:
- '**/details_harness|gsm8k|5_2023-10-17T16-47-23.562896.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-10-17T16-47-23.562896.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hellaswag|10_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- '**/details_harness|truthfulqa:mc|0_2023-07-19T19:50:05.826283.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-07-19T19:50:05.826283.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_10_17T16_47_23.562896
path:
- '**/details_harness|winogrande|5_2023-10-17T16-47-23.562896.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-10-17T16-47-23.562896.parquet'
- config_name: results
data_files:
- split: 2023_07_19T19_50_05.826283
path:
- results_2023-07-19T19:50:05.826283.parquet
- split: 2023_10_17T16_47_23.562896
path:
- results_2023-10-17T16-47-23.562896.parquet
- split: latest
path:
- results_2023-10-17T16-47-23.562896.parquet
---
# Dataset Card for Evaluation run of digitous/Javelin-R
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/digitous/Javelin-R
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [digitous/Javelin-R](https://huggingface.co/digitous/Javelin-R) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_digitous__Javelin-R",
"harness_winogrande_5",
split="train")
```
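Once loaded, the split behaves like any other Hugging Face `datasets` split. As a minimal follow-up sketch (the exact column names depend on the evaluation harness output, so inspect the schema rather than assuming field names):
```python
# Sketch: basic inspection of the loaded details split.
print(data.num_rows)   # number of evaluated examples in this run
print(data.features)   # per-example schema produced by the evaluation harness
print(data[0])         # first record of the winogrande details
```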
## Latest results
These are the [latest results from run 2023-10-17T16:47:23.562896](https://huggingface.co/datasets/open-llm-leaderboard/details_digitous__Javelin-R/blob/main/results_2023-10-17T16-47-23.562896.json) (note that there might be results for other tasks in the repository if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.0010486577181208054,
"em_stderr": 0.0003314581465219256,
"f1": 0.05006606543624186,
"f1_stderr": 0.001221286433761839,
"acc": 0.3323333371230866,
"acc_stderr": 0.008475235356826075
},
"harness|drop|3": {
"em": 0.0010486577181208054,
"em_stderr": 0.0003314581465219256,
"f1": 0.05006606543624186,
"f1_stderr": 0.001221286433761839
},
"harness|gsm8k|5": {
"acc": 0.016679302501895376,
"acc_stderr": 0.0035275958887224313
},
"harness|winogrande|5": {
"acc": 0.6479873717442778,
"acc_stderr": 0.01342287482492972
}
}
```
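These aggregated figures correspond to the `results` configuration declared in the metadata above. A minimal sketch, assuming its `latest` split as listed in the configs, for loading them programmatically:
```python
from datasets import load_dataset

# Sketch: load the aggregated metrics of the most recent run.
# The "results" config and its "latest" split are declared in the card metadata.
results = load_dataset(
    "open-llm-leaderboard/details_digitous__Javelin-R",
    "results",
    split="latest",
)
print(results[0])
```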
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
Dulain23/Patient_Scenarios | ---
dataset_info:
features:
- name: patient_age
dtype: int64
- name: patient_symptoms
dtype: string
- name: patient_emotional_state
dtype: string
- name: patient_gender
dtype: string
- name: patient_description
dtype: string
- name: patient_characteristics
dtype: string
- name: patient_medical_history
dtype: string
- name: patient_reason_for_visit
dtype: string
- name: patient_name
dtype: string
- name: prompts
dtype: string
splits:
- name: train
num_bytes: 266622
num_examples: 100
download_size: 85821
dataset_size: 266622
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
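A minimal sketch, assuming only the default configuration and `train` split declared above, for loading the patient scenarios:
```python
from datasets import load_dataset

# Sketch: load the single train split of patient scenarios described above.
scenarios = load_dataset("Dulain23/Patient_Scenarios", split="train")

print(scenarios.num_rows)  # 100 examples per the metadata above
# Each record carries the fields declared in the dataset_info features.
print(scenarios[0]["patient_name"], scenarios[0]["patient_age"])
```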
|
open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base | ---
pretty_name: Evaluation run of CorticalStack/travel-mistral-7B-16b-base
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [CorticalStack/travel-mistral-7B-16b-base](https://huggingface.co/CorticalStack/travel-mistral-7B-16b-base)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
  \nThe dataset is composed of 63 configurations, each one corresponding to one of the\
  \ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
  \ timestamp of the run. The \"train\" split always points to the latest results.\n\
  \nAn additional configuration \"results\" stores all the aggregated results of the\
\ run (and is used to compute and display the aggregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
  These are the [latest results from run 2024-02-11T09:58:07.096782](https://huggingface.co/datasets/open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base/blob/main/results_2024-02-11T09-58-07.096782.json) (note\
  \ that there might be results for other tasks in the repository if successive evals didn't\
  \ cover the same tasks. You can find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.62333273914223,\n\
\ \"acc_stderr\": 0.032635939156381126,\n \"acc_norm\": 0.6288825536861529,\n\
\ \"acc_norm_stderr\": 0.03329783376871095,\n \"mc1\": 0.35862913096695226,\n\
\ \"mc1_stderr\": 0.016789289499502025,\n \"mc2\": 0.5323211184642095,\n\
\ \"mc2_stderr\": 0.015107868373889385\n },\n \"harness|arc:challenge|25\"\
: {\n \"acc\": 0.5750853242320819,\n \"acc_stderr\": 0.014445698968520767,\n\
\ \"acc_norm\": 0.6143344709897611,\n \"acc_norm_stderr\": 0.014224250973257182\n\
\ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6314479187412866,\n\
\ \"acc_stderr\": 0.0048142619663768494,\n \"acc_norm\": 0.8350926110336586,\n\
\ \"acc_norm_stderr\": 0.0037033852685121734\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \
\ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n\
\ \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n\
\ \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\"\
: {\n \"acc\": 0.6447368421052632,\n \"acc_stderr\": 0.038947344870133176,\n\
\ \"acc_norm\": 0.6447368421052632,\n \"acc_norm_stderr\": 0.038947344870133176\n\
\ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.59,\n\
\ \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \
\ \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\
: {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n\
\ \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n\
\ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n\
\ \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \
\ \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\"\
: {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \
\ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \
\ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\
acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\"\
: 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\"\
: {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \
\ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \
\ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\
\ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\
\ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\
: {\n \"acc\": 0.45098039215686275,\n \"acc_stderr\": 0.04951218252396264,\n\
\ \"acc_norm\": 0.45098039215686275,\n \"acc_norm_stderr\": 0.04951218252396264\n\
\ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\
\ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\
\ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\
: {\n \"acc\": 0.5702127659574469,\n \"acc_stderr\": 0.03236214467715564,\n\
\ \"acc_norm\": 0.5702127659574469,\n \"acc_norm_stderr\": 0.03236214467715564\n\
\ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n\
\ \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n\
\ \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\
: {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n\
\ \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n\
\ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\
: 0.3994708994708995,\n \"acc_stderr\": 0.02522545028406788,\n \"\
acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.02522545028406788\n\
\ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\
\ \"acc_stderr\": 0.043758884927270605,\n \"acc_norm\": 0.3968253968253968,\n\
\ \"acc_norm_stderr\": 0.043758884927270605\n },\n \"harness|hendrycksTest-global_facts|5\"\
: {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \
\ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \
\ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7387096774193549,\n\
\ \"acc_stderr\": 0.024993053397764812,\n \"acc_norm\": 0.7387096774193549,\n\
\ \"acc_norm_stderr\": 0.024993053397764812\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\
: {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n\
\ \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n\
\ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \
\ \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\"\
: 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\
: {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\
\ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\
\ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\
: 0.7525252525252525,\n \"acc_stderr\": 0.03074630074212451,\n \"\
acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.03074630074212451\n\
\ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\
\ \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758733,\n\
\ \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758733\n\
\ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \
\ \"acc\": 0.6461538461538462,\n \"acc_stderr\": 0.024243783994062146,\n\
\ \"acc_norm\": 0.6461538461538462,\n \"acc_norm_stderr\": 0.024243783994062146\n\
\ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\
acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253255,\n \
\ \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253255\n\
\ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \
\ \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552378,\n \
\ \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552378\n\
\ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\
: 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"\
acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n\
\ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\
: 0.7926605504587156,\n \"acc_stderr\": 0.017381415563608674,\n \"\
acc_norm\": 0.7926605504587156,\n \"acc_norm_stderr\": 0.017381415563608674\n\
\ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\
: 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"\
acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n\
\ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\
: 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588667,\n \"\
acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588667\n\
\ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\
acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676173,\n \
\ \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676173\n\
\ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n\
\ \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n\
\ \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\"\
: {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596914,\n\
\ \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596914\n\
\ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\
\ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"\
acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n\
\ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n\
\ \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n\
\ \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\
: {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n\
\ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n\
\ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\
\ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\
\ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\
: {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\
\ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\
\ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n\
\ \"acc_stderr\": 0.021262719400406943,\n \"acc_norm\": 0.8803418803418803,\n\
\ \"acc_norm_stderr\": 0.021262719400406943\n },\n \"harness|hendrycksTest-medical_genetics|5\"\
: {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \
\ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \
\ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8020434227330779,\n\
\ \"acc_stderr\": 0.014248873549217576,\n \"acc_norm\": 0.8020434227330779,\n\
\ \"acc_norm_stderr\": 0.014248873549217576\n },\n \"harness|hendrycksTest-moral_disputes|5\"\
: {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n\
\ \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n\
\ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2737430167597765,\n\
\ \"acc_stderr\": 0.014912413096372434,\n \"acc_norm\": 0.2737430167597765,\n\
\ \"acc_norm_stderr\": 0.014912413096372434\n },\n \"harness|hendrycksTest-nutrition|5\"\
: {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.025917806117147158,\n\
\ \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.025917806117147158\n\
\ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n\
\ \"acc_stderr\": 0.025755865922632938,\n \"acc_norm\": 0.7106109324758842,\n\
\ \"acc_norm_stderr\": 0.025755865922632938\n },\n \"harness|hendrycksTest-prehistory|5\"\
: {\n \"acc\": 0.7283950617283951,\n \"acc_stderr\": 0.024748624490537375,\n\
\ \"acc_norm\": 0.7283950617283951,\n \"acc_norm_stderr\": 0.024748624490537375\n\
\ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\
acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \
\ \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n\
\ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44589308996088656,\n\
\ \"acc_stderr\": 0.012695244711379776,\n \"acc_norm\": 0.44589308996088656,\n\
\ \"acc_norm_stderr\": 0.012695244711379776\n },\n \"harness|hendrycksTest-professional_medicine|5\"\
: {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.028332959514031208,\n\
\ \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.028332959514031208\n\
\ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\
acc\": 0.6486928104575164,\n \"acc_stderr\": 0.019312676065786558,\n \
\ \"acc_norm\": 0.6486928104575164,\n \"acc_norm_stderr\": 0.019312676065786558\n\
\ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n\
\ \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n\
\ \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\"\
: {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n\
\ \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.028666857790274648\n\
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n\
\ \"acc_stderr\": 0.02484575321230604,\n \"acc_norm\": 0.8557213930348259,\n\
\ \"acc_norm_stderr\": 0.02484575321230604\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\
: {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \
\ \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n \
\ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n\
\ \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n\
\ \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\"\
: {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n\
\ \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n\
\ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.35862913096695226,\n\
\ \"mc1_stderr\": 0.016789289499502025,\n \"mc2\": 0.5323211184642095,\n\
\ \"mc2_stderr\": 0.015107868373889385\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7853196527229677,\n \"acc_stderr\": 0.011539912734345391\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.37680060652009095,\n \
\ \"acc_stderr\": 0.013347858757829158\n }\n}\n```"
repo_url: https://huggingface.co/CorticalStack/travel-mistral-7B-16b-base
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|arc:challenge|25_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|gsm8k|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hellaswag|10_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-management|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-virology|5_2024-02-11T09-58-07.096782.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2024-02-11T09-58-07.096782.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- '**/details_harness|winogrande|5_2024-02-11T09-58-07.096782.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2024-02-11T09-58-07.096782.parquet'
- config_name: results
data_files:
- split: 2024_02_11T09_58_07.096782
path:
- results_2024-02-11T09-58-07.096782.parquet
- split: latest
path:
- results_2024-02-11T09-58-07.096782.parquet
---
# Dataset Card for Evaluation run of CorticalStack/travel-mistral-7B-16b-base
<!-- Provide a quick summary of the dataset. -->
Dataset automatically created during the evaluation run of model [CorticalStack/travel-mistral-7B-16b-base](https://huggingface.co/CorticalStack/travel-mistral-7B-16b-base) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base",
"harness_winogrande_5",
	split="latest")
```
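The aggregated scores can be loaded the same way; a minimal sketch, assuming the repository layout declared in the YAML header above (a `results` configuration exposing a `latest` split):
```python
from datasets import load_dataset

# Load the aggregated "results" configuration; the "latest" split points to
# the most recent evaluation run recorded in this repository.
results = load_dataset(
    "open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base",
    "results",
    split="latest",
)
print(results[0])
```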
## Latest results
These are the [latest results from run 2024-02-11T09:58:07.096782](https://huggingface.co/datasets/open-llm-leaderboard/details_CorticalStack__travel-mistral-7B-16b-base/blob/main/results_2024-02-11T09-58-07.096782.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"acc": 0.62333273914223,
"acc_stderr": 0.032635939156381126,
"acc_norm": 0.6288825536861529,
"acc_norm_stderr": 0.03329783376871095,
"mc1": 0.35862913096695226,
"mc1_stderr": 0.016789289499502025,
"mc2": 0.5323211184642095,
"mc2_stderr": 0.015107868373889385
},
"harness|arc:challenge|25": {
"acc": 0.5750853242320819,
"acc_stderr": 0.014445698968520767,
"acc_norm": 0.6143344709897611,
"acc_norm_stderr": 0.014224250973257182
},
"harness|hellaswag|10": {
"acc": 0.6314479187412866,
"acc_stderr": 0.0048142619663768494,
"acc_norm": 0.8350926110336586,
"acc_norm_stderr": 0.0037033852685121734
},
"harness|hendrycksTest-abstract_algebra|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695236,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695236
},
"harness|hendrycksTest-anatomy|5": {
"acc": 0.5851851851851851,
"acc_stderr": 0.04256193767901408,
"acc_norm": 0.5851851851851851,
"acc_norm_stderr": 0.04256193767901408
},
"harness|hendrycksTest-astronomy|5": {
"acc": 0.6447368421052632,
"acc_stderr": 0.038947344870133176,
"acc_norm": 0.6447368421052632,
"acc_norm_stderr": 0.038947344870133176
},
"harness|hendrycksTest-business_ethics|5": {
"acc": 0.59,
"acc_stderr": 0.04943110704237102,
"acc_norm": 0.59,
"acc_norm_stderr": 0.04943110704237102
},
"harness|hendrycksTest-clinical_knowledge|5": {
"acc": 0.6754716981132075,
"acc_stderr": 0.02881561571343211,
"acc_norm": 0.6754716981132075,
"acc_norm_stderr": 0.02881561571343211
},
"harness|hendrycksTest-college_biology|5": {
"acc": 0.75,
"acc_stderr": 0.03621034121889507,
"acc_norm": 0.75,
"acc_norm_stderr": 0.03621034121889507
},
"harness|hendrycksTest-college_chemistry|5": {
"acc": 0.48,
"acc_stderr": 0.050211673156867795,
"acc_norm": 0.48,
"acc_norm_stderr": 0.050211673156867795
},
"harness|hendrycksTest-college_computer_science|5": {
"acc": 0.49,
"acc_stderr": 0.05024183937956912,
"acc_norm": 0.49,
"acc_norm_stderr": 0.05024183937956912
},
"harness|hendrycksTest-college_mathematics|5": {
"acc": 0.28,
"acc_stderr": 0.04512608598542127,
"acc_norm": 0.28,
"acc_norm_stderr": 0.04512608598542127
},
"harness|hendrycksTest-college_medicine|5": {
"acc": 0.630057803468208,
"acc_stderr": 0.0368122963339432,
"acc_norm": 0.630057803468208,
"acc_norm_stderr": 0.0368122963339432
},
"harness|hendrycksTest-college_physics|5": {
"acc": 0.45098039215686275,
"acc_stderr": 0.04951218252396264,
"acc_norm": 0.45098039215686275,
"acc_norm_stderr": 0.04951218252396264
},
"harness|hendrycksTest-computer_security|5": {
"acc": 0.71,
"acc_stderr": 0.045604802157206845,
"acc_norm": 0.71,
"acc_norm_stderr": 0.045604802157206845
},
"harness|hendrycksTest-conceptual_physics|5": {
"acc": 0.5702127659574469,
"acc_stderr": 0.03236214467715564,
"acc_norm": 0.5702127659574469,
"acc_norm_stderr": 0.03236214467715564
},
"harness|hendrycksTest-econometrics|5": {
"acc": 0.5087719298245614,
"acc_stderr": 0.04702880432049615,
"acc_norm": 0.5087719298245614,
"acc_norm_stderr": 0.04702880432049615
},
"harness|hendrycksTest-electrical_engineering|5": {
"acc": 0.5724137931034483,
"acc_stderr": 0.04122737111370332,
"acc_norm": 0.5724137931034483,
"acc_norm_stderr": 0.04122737111370332
},
"harness|hendrycksTest-elementary_mathematics|5": {
"acc": 0.3994708994708995,
"acc_stderr": 0.02522545028406788,
"acc_norm": 0.3994708994708995,
"acc_norm_stderr": 0.02522545028406788
},
"harness|hendrycksTest-formal_logic|5": {
"acc": 0.3968253968253968,
"acc_stderr": 0.043758884927270605,
"acc_norm": 0.3968253968253968,
"acc_norm_stderr": 0.043758884927270605
},
"harness|hendrycksTest-global_facts|5": {
"acc": 0.34,
"acc_stderr": 0.04760952285695235,
"acc_norm": 0.34,
"acc_norm_stderr": 0.04760952285695235
},
"harness|hendrycksTest-high_school_biology|5": {
"acc": 0.7387096774193549,
"acc_stderr": 0.024993053397764812,
"acc_norm": 0.7387096774193549,
"acc_norm_stderr": 0.024993053397764812
},
"harness|hendrycksTest-high_school_chemistry|5": {
"acc": 0.4975369458128079,
"acc_stderr": 0.03517945038691063,
"acc_norm": 0.4975369458128079,
"acc_norm_stderr": 0.03517945038691063
},
"harness|hendrycksTest-high_school_computer_science|5": {
"acc": 0.64,
"acc_stderr": 0.048241815132442176,
"acc_norm": 0.64,
"acc_norm_stderr": 0.048241815132442176
},
"harness|hendrycksTest-high_school_european_history|5": {
"acc": 0.7575757575757576,
"acc_stderr": 0.03346409881055953,
"acc_norm": 0.7575757575757576,
"acc_norm_stderr": 0.03346409881055953
},
"harness|hendrycksTest-high_school_geography|5": {
"acc": 0.7525252525252525,
"acc_stderr": 0.03074630074212451,
"acc_norm": 0.7525252525252525,
"acc_norm_stderr": 0.03074630074212451
},
"harness|hendrycksTest-high_school_government_and_politics|5": {
"acc": 0.8704663212435233,
"acc_stderr": 0.024233532297758733,
"acc_norm": 0.8704663212435233,
"acc_norm_stderr": 0.024233532297758733
},
"harness|hendrycksTest-high_school_macroeconomics|5": {
"acc": 0.6461538461538462,
"acc_stderr": 0.024243783994062146,
"acc_norm": 0.6461538461538462,
"acc_norm_stderr": 0.024243783994062146
},
"harness|hendrycksTest-high_school_mathematics|5": {
"acc": 0.337037037037037,
"acc_stderr": 0.028820884666253255,
"acc_norm": 0.337037037037037,
"acc_norm_stderr": 0.028820884666253255
},
"harness|hendrycksTest-high_school_microeconomics|5": {
"acc": 0.6134453781512605,
"acc_stderr": 0.03163145807552378,
"acc_norm": 0.6134453781512605,
"acc_norm_stderr": 0.03163145807552378
},
"harness|hendrycksTest-high_school_physics|5": {
"acc": 0.3576158940397351,
"acc_stderr": 0.03913453431177258,
"acc_norm": 0.3576158940397351,
"acc_norm_stderr": 0.03913453431177258
},
"harness|hendrycksTest-high_school_psychology|5": {
"acc": 0.7926605504587156,
"acc_stderr": 0.017381415563608674,
"acc_norm": 0.7926605504587156,
"acc_norm_stderr": 0.017381415563608674
},
"harness|hendrycksTest-high_school_statistics|5": {
"acc": 0.5092592592592593,
"acc_stderr": 0.034093869469927006,
"acc_norm": 0.5092592592592593,
"acc_norm_stderr": 0.034093869469927006
},
"harness|hendrycksTest-high_school_us_history|5": {
"acc": 0.7941176470588235,
"acc_stderr": 0.028379449451588667,
"acc_norm": 0.7941176470588235,
"acc_norm_stderr": 0.028379449451588667
},
"harness|hendrycksTest-high_school_world_history|5": {
"acc": 0.7848101265822784,
"acc_stderr": 0.026750826994676173,
"acc_norm": 0.7848101265822784,
"acc_norm_stderr": 0.026750826994676173
},
"harness|hendrycksTest-human_aging|5": {
"acc": 0.6771300448430493,
"acc_stderr": 0.03138147637575499,
"acc_norm": 0.6771300448430493,
"acc_norm_stderr": 0.03138147637575499
},
"harness|hendrycksTest-human_sexuality|5": {
"acc": 0.7633587786259542,
"acc_stderr": 0.03727673575596914,
"acc_norm": 0.7633587786259542,
"acc_norm_stderr": 0.03727673575596914
},
"harness|hendrycksTest-international_law|5": {
"acc": 0.7933884297520661,
"acc_stderr": 0.03695980128098824,
"acc_norm": 0.7933884297520661,
"acc_norm_stderr": 0.03695980128098824
},
"harness|hendrycksTest-jurisprudence|5": {
"acc": 0.7592592592592593,
"acc_stderr": 0.04133119440243839,
"acc_norm": 0.7592592592592593,
"acc_norm_stderr": 0.04133119440243839
},
"harness|hendrycksTest-logical_fallacies|5": {
"acc": 0.7607361963190185,
"acc_stderr": 0.033519538795212696,
"acc_norm": 0.7607361963190185,
"acc_norm_stderr": 0.033519538795212696
},
"harness|hendrycksTest-machine_learning|5": {
"acc": 0.4642857142857143,
"acc_stderr": 0.04733667890053756,
"acc_norm": 0.4642857142857143,
"acc_norm_stderr": 0.04733667890053756
},
"harness|hendrycksTest-management|5": {
"acc": 0.7669902912621359,
"acc_stderr": 0.04185832598928315,
"acc_norm": 0.7669902912621359,
"acc_norm_stderr": 0.04185832598928315
},
"harness|hendrycksTest-marketing|5": {
"acc": 0.8803418803418803,
"acc_stderr": 0.021262719400406943,
"acc_norm": 0.8803418803418803,
"acc_norm_stderr": 0.021262719400406943
},
"harness|hendrycksTest-medical_genetics|5": {
"acc": 0.7,
"acc_stderr": 0.046056618647183814,
"acc_norm": 0.7,
"acc_norm_stderr": 0.046056618647183814
},
"harness|hendrycksTest-miscellaneous|5": {
"acc": 0.8020434227330779,
"acc_stderr": 0.014248873549217576,
"acc_norm": 0.8020434227330779,
"acc_norm_stderr": 0.014248873549217576
},
"harness|hendrycksTest-moral_disputes|5": {
"acc": 0.6878612716763006,
"acc_stderr": 0.024946792225272314,
"acc_norm": 0.6878612716763006,
"acc_norm_stderr": 0.024946792225272314
},
"harness|hendrycksTest-moral_scenarios|5": {
"acc": 0.2737430167597765,
"acc_stderr": 0.014912413096372434,
"acc_norm": 0.2737430167597765,
"acc_norm_stderr": 0.014912413096372434
},
"harness|hendrycksTest-nutrition|5": {
"acc": 0.7124183006535948,
"acc_stderr": 0.025917806117147158,
"acc_norm": 0.7124183006535948,
"acc_norm_stderr": 0.025917806117147158
},
"harness|hendrycksTest-philosophy|5": {
"acc": 0.7106109324758842,
"acc_stderr": 0.025755865922632938,
"acc_norm": 0.7106109324758842,
"acc_norm_stderr": 0.025755865922632938
},
"harness|hendrycksTest-prehistory|5": {
"acc": 0.7283950617283951,
"acc_stderr": 0.024748624490537375,
"acc_norm": 0.7283950617283951,
"acc_norm_stderr": 0.024748624490537375
},
"harness|hendrycksTest-professional_accounting|5": {
"acc": 0.4645390070921986,
"acc_stderr": 0.029752389657427047,
"acc_norm": 0.4645390070921986,
"acc_norm_stderr": 0.029752389657427047
},
"harness|hendrycksTest-professional_law|5": {
"acc": 0.44589308996088656,
"acc_stderr": 0.012695244711379776,
"acc_norm": 0.44589308996088656,
"acc_norm_stderr": 0.012695244711379776
},
"harness|hendrycksTest-professional_medicine|5": {
"acc": 0.6801470588235294,
"acc_stderr": 0.028332959514031208,
"acc_norm": 0.6801470588235294,
"acc_norm_stderr": 0.028332959514031208
},
"harness|hendrycksTest-professional_psychology|5": {
"acc": 0.6486928104575164,
"acc_stderr": 0.019312676065786558,
"acc_norm": 0.6486928104575164,
"acc_norm_stderr": 0.019312676065786558
},
"harness|hendrycksTest-public_relations|5": {
"acc": 0.6636363636363637,
"acc_stderr": 0.04525393596302506,
"acc_norm": 0.6636363636363637,
"acc_norm_stderr": 0.04525393596302506
},
"harness|hendrycksTest-security_studies|5": {
"acc": 0.7224489795918367,
"acc_stderr": 0.028666857790274648,
"acc_norm": 0.7224489795918367,
"acc_norm_stderr": 0.028666857790274648
},
"harness|hendrycksTest-sociology|5": {
"acc": 0.8557213930348259,
"acc_stderr": 0.02484575321230604,
"acc_norm": 0.8557213930348259,
"acc_norm_stderr": 0.02484575321230604
},
"harness|hendrycksTest-us_foreign_policy|5": {
"acc": 0.85,
"acc_stderr": 0.03588702812826371,
"acc_norm": 0.85,
"acc_norm_stderr": 0.03588702812826371
},
"harness|hendrycksTest-virology|5": {
"acc": 0.5240963855421686,
"acc_stderr": 0.03887971849597264,
"acc_norm": 0.5240963855421686,
"acc_norm_stderr": 0.03887971849597264
},
"harness|hendrycksTest-world_religions|5": {
"acc": 0.8128654970760234,
"acc_stderr": 0.02991312723236804,
"acc_norm": 0.8128654970760234,
"acc_norm_stderr": 0.02991312723236804
},
"harness|truthfulqa:mc|0": {
"mc1": 0.35862913096695226,
"mc1_stderr": 0.016789289499502025,
"mc2": 0.5323211184642095,
"mc2_stderr": 0.015107868373889385
},
"harness|winogrande|5": {
"acc": 0.7853196527229677,
"acc_stderr": 0.011539912734345391
},
"harness|gsm8k|5": {
"acc": 0.37680060652009095,
"acc_stderr": 0.013347858757829158
}
}
```
## Dataset Details
### Dataset Description
<!-- Provide a longer summary of what this dataset is. -->
- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]
### Dataset Sources [optional]
<!-- Provide the basic links for the dataset. -->
- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]
## Uses
<!-- Address questions around how the dataset is intended to be used. -->
### Direct Use
<!-- This section describes suitable use cases for the dataset. -->
[More Information Needed]
### Out-of-Scope Use
<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->
[More Information Needed]
## Dataset Structure
<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->
[More Information Needed]
## Dataset Creation
### Curation Rationale
<!-- Motivation for the creation of this dataset. -->
[More Information Needed]
### Source Data
<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->
#### Data Collection and Processing
<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->
[More Information Needed]
#### Who are the source data producers?
<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->
[More Information Needed]
### Annotations [optional]
<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->
#### Annotation process
<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->
[More Information Needed]
#### Who are the annotators?
<!-- This section describes the people or systems who created the annotations. -->
[More Information Needed]
#### Personal and Sensitive Information
<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->
[More Information Needed]
## Bias, Risks, and Limitations
<!-- This section is meant to convey both technical and sociotechnical limitations. -->
[More Information Needed]
### Recommendations
<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.
## Citation [optional]
<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->
**BibTeX:**
[More Information Needed]
**APA:**
[More Information Needed]
## Glossary [optional]
<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->
[More Information Needed]
## More Information [optional]
[More Information Needed]
## Dataset Card Authors [optional]
[More Information Needed]
## Dataset Card Contact
[More Information Needed] |
arieg/cluster19_large_150 | ---
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': '000715'
'1': 000890
'2': '001544'
'3': 002096
'4': '007011'
'5': '010675'
'6': 015880
'7': '020375'
'8': 021842
'9': '024216'
'10': 024218
'11': 038847
'12': 040509
'13': '041573'
'14': 045390
'15': 045392
'16': 045393
'17': '045517'
'18': 048046
'19': 052859
'20': 052860
'21': '054033'
'22': 057822
'23': 058173
'24': '063655'
'25': '066073'
'26': 068680
'27': 068682
'28': 068683
'29': 068875
'30': 069207
'31': 069209
'32': 072930
'33': 074959
'34': 074960
'35': 080389
'36': 081638
'37': 081803
'38': 082157
'39': 084195
'40': 084200
'41': 084201
'42': 085290
'43': 087102
'44': 087106
'45': 090592
'46': 090617
'47': 092950
'48': 093364
'49': 093917
'50': 094093
'51': 094411
'52': 097279
'53': 097283
'54': 098202
'55': '105672'
'56': '106629'
'57': '108464'
'58': '108471'
'59': '108885'
'60': '109189'
'61': '110274'
'62': '111147'
'63': '111222'
'64': '111227'
'65': '111230'
'66': '111819'
'67': '113110'
'68': '114398'
'69': '114405'
'70': '115767'
'71': '117991'
'72': '118223'
'73': '118952'
'74': '121976'
'75': '123484'
'76': '123486'
'77': '124874'
'78': '124876'
'79': '126187'
'80': '126455'
'81': '126512'
'82': '126519'
'83': '126979'
'84': '127036'
'85': '127037'
'86': '127878'
'87': '127936'
'88': '128816'
'89': '130369'
'90': '130988'
'91': '131911'
'92': '131912'
'93': '133434'
'94': '133436'
'95': '133446'
'96': '133456'
'97': '133459'
'98': '133545'
'99': '133551'
'100': '133564'
'101': '133574'
'102': '134580'
'103': '134586'
'104': '135092'
'105': '136138'
'106': '137166'
'107': '137167'
'108': '137168'
'109': '137169'
'110': '137170'
'111': '137171'
'112': '137172'
'113': '137173'
'114': '137175'
'115': '137176'
'116': '137567'
'117': '137627'
'118': '138021'
'119': '139003'
'120': '139485'
'121': '139681'
'122': '139876'
'123': '141137'
'124': '141141'
'125': '141972'
'126': '142094'
'127': '142096'
'128': '142098'
'129': '145431'
splits:
- name: train
num_bytes: 994536368.0
num_examples: 19500
download_size: 1025002189
dataset_size: 994536368.0
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
|
lcw99/oscar-ko-only | ---
language:
- ko
---
# oscar dataset only korean |
mii-llm/sft-v01 | ---
dataset_info:
features:
- name: conversations
list:
- name: from
dtype: string
- name: value
dtype: string
- name: prompt
dtype: string
splits:
- name: train
num_bytes: 22592740
num_examples: 12227
download_size: 11534930
dataset_size: 22592740
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "sft-v01"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
gabraken/mtg-detection | ---
license: mit
dataset_info:
features:
- name: id
dtype: string
- name: image
dtype: binary
- name: annotation
sequence:
sequence: float64
- name: metadata
dtype: string
splits:
- name: train
num_bytes: 22088296176
num_examples: 10000
- name: test
num_bytes: 6615226028
num_examples: 3000
download_size: 28512980450
dataset_size: 28703522204
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
task_categories:
- object-detection
language:
- en
tags:
- mtg
- detection
- synthetic
pretty_name: Magic The Gathering Card Detection Dataset
size_categories:
- 10K<n<100K
---
## Magic The Gathering Card Detection Dataset
This dataset is dedicated to people wanting to build card detection models.
It emulates MTG cards placed at random positions and provides the visible corner positions for each card, as well as the direction to the next corner for each corner.
### Example

It contains 10k 1024x1024 pictures in the train split and 3k in the test split.
## Structure
Each row of the dataset contains:
- id: (int) image id
- image: (binary) The binary image
- annotation: (array<point>) An array of corner representations:
- x: float [0, 1] x relative position in the image
- y: float [0, 1] y relative position in the image
- visible: bool Is the point visible or hidden by other cards
- angle: float [-PI, PI] angle of the vector going to the next corner
- corner_id: int [0, 1, 2, 3] which card corner (top left, top right, bottom right, bottom left)
- metadata: (string) JSON data containing all metadata associated with each card present in the frame, for those who want to go further (rarity detection, frame types, artists, mana values, etc.)
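As a rough illustration of how these fields might be consumed, here is a minimal sketch; the ordering of the five values inside each corner entry (x, y, visible, angle, corner_id) follows the description above but is an assumption, as is decoding the binary image field with Pillow:
```python
import io
import json

from datasets import load_dataset
from PIL import Image

# Stream the train split so the full ~22 GB archive is not downloaded up front.
ds = load_dataset("gabraken/mtg-detection", split="train", streaming=True)

for row in ds:
    image = Image.open(io.BytesIO(row["image"]))  # binary column -> PIL image
    cards_metadata = json.loads(row["metadata"])  # per-card JSON metadata
    for corner in row["annotation"]:
        # Assumed ordering of each corner entry; see the field list above.
        x, y, visible, angle, corner_id = corner
        print(int(corner_id), x, y, bool(visible), angle)
    break  # inspect only the first example
```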
## Credits:
This dataset is based on other existing MIT Licensed dataset:
- MTG-Json
- Scryfall
This project is unofficial Fan Content permitted under the Fan Content Policy. Not approved/endorsed by Wizards. Portions of the materials used are property of Wizards of the Coast.
©Wizards of the Coast LLC. |
imthanhlv/laion2B-multi-Vietnamese-subset | ---
license: cc-by-4.0
task_categories:
- text-to-image
- image-to-text
language:
- vi
---
# Dataset Card for LAION-2B-multi Vietnamese subset
### Dataset Summary
This dataset is the Vietnamese subset filtered from [Laion2B-multi](https://huggingface.co/datasets/laion/laion2B-multi).
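The filtering itself can be reproduced along these lines; this is only a sketch, not the exact script used, and it assumes the parent dataset exposes `LANGUAGE`, `URL`, and `TEXT` metadata columns:
```python
from datasets import load_dataset

# Stream laion2B-multi and keep only rows tagged as Vietnamese.
laion_multi = load_dataset("laion/laion2B-multi", split="train", streaming=True)
vietnamese = laion_multi.filter(lambda row: row["LANGUAGE"] == "vi")

# Peek at a few filtered rows.
for row in vietnamese.take(3):
    print(row["URL"], row["TEXT"])
```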
To get the subset of your language, check out [this notebook](https://colab.research.google.com/drive/1bPvgFPKEIjzw7wT_9GwlDPvgTYDFdblr?usp=sharing) |
PrasannaL/SQLPG | ---
license: llama2
---
|
Nexdata/multi_language | ---
task_categories:
- automatic-speech-recognition
language:
- en
- de
- fr
- it
- es
- ko
- ja
---
# Dataset Card for multi_language
## Table of Contents
- [Table of Contents](#table-of-contents)
- [Dataset Description](#dataset-description)
- [Dataset Summary](#dataset-summary)
- [Supported Tasks and Leaderboards](#supported-tasks-and-leaderboards)
- [Languages](#languages)
- [Dataset Structure](#dataset-structure)
- [Data Instances](#data-instances)
- [Data Fields](#data-fields)
- [Data Splits](#data-splits)
- [Dataset Creation](#dataset-creation)
- [Curation Rationale](#curation-rationale)
- [Source Data](#source-data)
- [Annotations](#annotations)
- [Personal and Sensitive Information](#personal-and-sensitive-information)
- [Considerations for Using the Data](#considerations-for-using-the-data)
- [Social Impact of Dataset](#social-impact-of-dataset)
- [Discussion of Biases](#discussion-of-biases)
- [Other Known Limitations](#other-known-limitations)
- [Additional Information](#additional-information)
- [Dataset Curators](#dataset-curators)
- [Licensing Information](#licensing-information)
- [Citation Information](#citation-information)
- [Contributions](#contributions)
## Dataset Description
- **Homepage:** https://nexdata.ai/?source=Huggingface
- **Repository:**
- **Paper:**
- **Leaderboard:**
- **Point of Contact:**
### Dataset Summary
The dataset contains 25,000 hours of multi-language reading speech data. It's recorded by native speakers, covering English, French, German, Russian, Spanish, Portuguese, Italian, Japanese, Korean, Hindi, Vietnamese, Tagalog, Thai, etc. The recording is rich in content, covering multiple categories such as economy, entertainment, news, oral language, numbers, and letters. The format is 16kHz, 16bit, uncompressed wav, mono channel. The sentence accuracy is over 95%.
For more details, please refer to the link: https://nexdata.ai/speechRecognition?source=Huggingface
### Supported Tasks and Leaderboards
automatic-speech-recognition, audio-speaker-identification: The dataset can be used to train a model for Automatic Speech Recognition (ASR).
### Languages
English, French, German, Russian, Spanish, Portuguese, Italian, Japanese, Korean, Hindi, Vietnamese, Tagalog, Thai etc.
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
Commercial License
### Citation Information
[More Information Needed]
### Contributions |
CyberHarem/aversa_fireemblem | ---
license: mit
task_categories:
- text-to-image
tags:
- art
- not-for-all-audiences
size_categories:
- n<1K
---
# Dataset of aversa/インバース (Fire Emblem)
This is the dataset of aversa/インバース (Fire Emblem), containing 62 images and their tags.
The core tags of this character are `long_hair, breasts, white_hair, facial_mark, dark-skinned_female, dark_skin, large_breasts, red_eyes`, which are pruned in this dataset.
Images are crawled from many sites (e.g. danbooru, pixiv, zerochan, ...); the auto-crawling system is powered by the [DeepGHS Team](https://github.com/deepghs) ([huggingface organization](https://huggingface.co/deepghs)).
## List of Packages
| Name | Images | Size | Download | Type | Description |
|:-----------------|---------:|:-----------|:-------------------------------------------------------------------------------------------------------------------|:-----------|:---------------------------------------------------------------------|
| raw | 62 | 75.65 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aversa_fireemblem/resolve/main/dataset-raw.zip) | Waifuc-Raw | Raw data with meta information (min edge aligned to 1400 if larger). |
| 800 | 62 | 43.18 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aversa_fireemblem/resolve/main/dataset-800.zip) | IMG+TXT | dataset with the shorter side not exceeding 800 pixels. |
| stage3-p480-800 | 131 | 83.90 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aversa_fireemblem/resolve/main/dataset-stage3-p480-800.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
| 1200 | 62 | 66.79 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aversa_fireemblem/resolve/main/dataset-1200.zip) | IMG+TXT | dataset with the shorter side not exceeding 1200 pixels. |
| stage3-p480-1200 | 131 | 120.03 MiB | [Download](https://huggingface.co/datasets/CyberHarem/aversa_fireemblem/resolve/main/dataset-stage3-p480-1200.zip) | IMG+TXT | 3-stage cropped dataset with the area not less than 480x480 pixels. |
### Load Raw Dataset with Waifuc
We provide raw dataset (including tagged images) for [waifuc](https://deepghs.github.io/waifuc/main/tutorials/installation/index.html) loading. If you need this, just run the following code
```python
import os
import zipfile
from huggingface_hub import hf_hub_download
from waifuc.source import LocalSource
# download raw archive file
zip_file = hf_hub_download(
repo_id='CyberHarem/aversa_fireemblem',
repo_type='dataset',
filename='dataset-raw.zip',
)
# extract files to your directory
dataset_dir = 'dataset_dir'
os.makedirs(dataset_dir, exist_ok=True)
with zipfile.ZipFile(zip_file, 'r') as zf:
zf.extractall(dataset_dir)
# load the dataset with waifuc
source = LocalSource(dataset_dir)
for item in source:
print(item.image, item.meta['filename'], item.meta['tags'])
```
## List of Clusters
List of tag clustering result, maybe some outfits can be mined here.
### Raw Text Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | Tags |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| 0 | 37 |  |  |  |  |  | 1girl, solo, cleavage, looking_at_viewer, thighhighs, smile, nail_polish, simple_background, bridal_gauntlets, navel, black_nails, book, dress, jewelry, long_fingernails, white_background |
### Table Version
| # | Samples | Img-1 | Img-2 | Img-3 | Img-4 | Img-5 | 1girl | solo | cleavage | looking_at_viewer | thighhighs | smile | nail_polish | simple_background | bridal_gauntlets | navel | black_nails | book | dress | jewelry | long_fingernails | white_background |
|----:|----------:|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------|:-------|:-----------|:--------------------|:-------------|:--------|:--------------|:--------------------|:-------------------|:--------|:--------------|:-------|:--------|:----------|:-------------------|:-------------------|
| 0 | 37 |  |  |  |  |  | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X |
|
kushalps/cpsc2018 | ---
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: validation
path: data/validation-*
- split: test
path: data/test-*
dataset_info:
features:
- name: image
dtype: image
- name: label
dtype:
class_label:
names:
'0': 1AVB
'1': AF
'2': LBBB
'3': Normal
'4': PAC
'5': PVC
'6': RBBB
'7': STD
'8': STE
splits:
- name: train
num_bytes: 2271502441.611
num_examples: 44327
- name: validation
num_bytes: 15416122.0
num_examples: 285
- name: test
num_bytes: 66362558.867
num_examples: 1283
download_size: 2478695413
dataset_size: 2353281122.478
---
# Dataset Card for "cpsc2018"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
jonbarlow/coarse5-labeled-urls-headers | ---
dataset_info:
features:
- name: id
dtype: int64
- name: url
dtype: string
- name: label
dtype: string
- name: coarse_label
dtype: string
- name: html_title
dtype: string
- name: meta_description
dtype: string
- name: http_response
dtype: float64
- name: h1
dtype: string
- name: h2
dtype: string
- name: h3
dtype: string
- name: h4
dtype: string
- name: h5
dtype: string
- name: h6
dtype: string
splits:
- name: train
num_bytes: 1228661.0604011193
num_examples: 2909
- name: test
num_bytes: 129243.82416044777
num_examples: 306
download_size: 580471
dataset_size: 1357904.884561567
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
- split: test
path: data/test-*
---
|
pallie/restaurantNLP | ---
license: apache-2.0
---
|
taeshahn/ko-lima | ---
configs:
- config_name: default
data_files:
- split: train
path: plain/train.jsonl
- split: test
path: plain/test.jsonl
- config_name: plain
data_files:
- split: train
path: plain/train.jsonl
- split: test
path: plain/test.jsonl
- config_name: vicuna
data_files:
- split: train
path: vicuna/train.jsonl
- split: test
path: vicuna/test.jsonl
language:
- ko
tags:
- lima
- kolima
- korean
- instruction
size_categories:
- 1K<n<10K
license: cc-by-nc-sa-4.0
---
# Dataset Card for KoLIMA
## Dataset Description
KoLIMA is a Korean translation of the [LIMA dataset](https://huggingface.co/datasets/GAIR/lima) on which Meta's [LIMA model](https://arxiv.org/abs/2305.11206) (Zhou et al., 2023) was trained. The translation was produced with the [DeepL API](https://www.deepl.com/docs-api), with financial support from Tech Collaborative Lab in SK Inc. Text enclosed in code blocks or special characters was kept untranslated. The dataset consists of 1,030 `train` examples and 300 `test` examples, 1,330 in total, and the same translations are currently provided in two formats: `plain` and `vicuna`.
Please feel free to contact me if you have any questions about the dataset. I'm best reached via [email](mailto:taes.hahn@gmail.com).
#### Changelog
- [29 June 2023] New format added: `vicuna` format with the same translation is now available.
- [16 June 2023] Enhanced translation: text enclosed in special characters, e.g. `$` and `` ` ``, is now kept intact without translation (see the sketch after this list).
- [14 June 2023] First upload.
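The masking below is only an illustrative sketch of that idea, not the pipeline actually used for KoLIMA: spans wrapped in `` ` `` or `$` are hidden behind placeholder tokens before a translation call and restored afterwards (the `deepl_translate` call is a hypothetical stand-in, not a real client).
```python
import re

# Protect `code` and $formula$ spans so a machine-translation API leaves them untouched.
SPAN_RE = re.compile(r"(`[^`]+`|\$[^$]+\$)")

def protect(text: str):
    spans = []
    def repl(match):
        spans.append(match.group(0))
        return f"<SPAN_{len(spans) - 1}>"
    return SPAN_RE.sub(repl, text), spans

def restore(text: str, spans):
    for i, span in enumerate(spans):
        text = text.replace(f"<SPAN_{i}>", span)
    return text

masked, spans = protect("Call `load_dataset` with a budget of $n$ examples.")
# translated = deepl_translate(masked)  # hypothetical API call; placeholders pass through unchanged
print(restore(masked, spans))
```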
### Usage
```python
>>> from datasets import load_dataset
>>> ko_lima = load_dataset('taeshahn/ko-lima', 'plain') # or load_dataset('taeshahn/ko-lima')
>>> ko_lima_vicuna = load_dataset('taeshahn/ko-lima', 'vicuna')
```
```python
>>> ko_lima['train'][1025]
{
    'conversations': [
        # A four-turn Korean conversation (question, answer, follow-up question, answer);
        # the Korean text is garbled (mojibake) in this copy of the card.
        ...
    ],
    'source': 'multi_turn'
}
```
```python
>>> ko_lima_vicuna['train'][1025]
{
    'id': '1026-multi_turn',
    'conversations': [
        # The same conversation in the vicuna format: a list of {'from': ..., 'value': ...}
        # dicts alternating between the user and the assistant; the Korean text is
        # garbled (mojibake) in this copy of the card.
        ...
    ]
}
```
### Citation Information
```
@InProceedings{kolimadataset,
title = {KoLIMA: Korean LIMA Dataset for Efficient Instruction-tuning},
author = {Hahn, Taeseung},
year = {2023}
}
``` |
open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0 | ---
pretty_name: Evaluation run of uukuguy/speechless-code-mistral-orca-7b-v1.0
dataset_summary: "Dataset automatically created during the evaluation run of model\
\ [uukuguy/speechless-code-mistral-orca-7b-v1.0](https://huggingface.co/uukuguy/speechless-code-mistral-orca-7b-v1.0)\
\ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\
\nThe dataset is composed of 64 configuration, each one coresponding to one of the\
\ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\
\ found as a specific split in each configuration, the split being named using the\
\ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\
\nAn additional configuration \"results\" store all the aggregated results of the\
\ run (and is used to compute and display the agregated metrics on the [Open LLM\
\ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\
\nTo load the details from a run, you can for instance do the following:\n```python\n\
from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0\"\
,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\
These are the [latest results from run 2023-10-24T15:07:12.352820](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0/blob/main/results_2023-10-24T15-07-12.352820.json)(note\
\ that their might be results for other tasks in the repos if successive evals didn't\
\ cover the same tasks. You find each in the results and the \"latest\" split for\
\ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.4526006711409396,\n\
\ \"em_stderr\": 0.005097407791242309,\n \"f1\": 0.4989010067114103,\n\
\ \"f1_stderr\": 0.004905672332696013,\n \"acc\": 0.42884877867222604,\n\
\ \"acc_stderr\": 0.009659566392137438\n },\n \"harness|drop|3\": {\n\
\ \"em\": 0.4526006711409396,\n \"em_stderr\": 0.005097407791242309,\n\
\ \"f1\": 0.4989010067114103,\n \"f1_stderr\": 0.004905672332696013\n\
\ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.08263836239575435,\n \
\ \"acc_stderr\": 0.0075840892201481476\n },\n \"harness|winogrande|5\"\
: {\n \"acc\": 0.7750591949486977,\n \"acc_stderr\": 0.01173504356412673\n\
\ }\n}\n```"
repo_url: https://huggingface.co/uukuguy/speechless-code-mistral-orca-7b-v1.0
leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
point_of_contact: clementine@hf.co
configs:
- config_name: harness_arc_challenge_25
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|arc:challenge|25_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|arc:challenge|25_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_drop_3
data_files:
- split: 2023_10_24T15_07_12.352820
path:
- '**/details_harness|drop|3_2023-10-24T15-07-12.352820.parquet'
- split: latest
path:
- '**/details_harness|drop|3_2023-10-24T15-07-12.352820.parquet'
- config_name: harness_gsm8k_5
data_files:
- split: 2023_10_24T15_07_12.352820
path:
- '**/details_harness|gsm8k|5_2023-10-24T15-07-12.352820.parquet'
- split: latest
path:
- '**/details_harness|gsm8k|5_2023-10-24T15-07-12.352820.parquet'
- config_name: harness_hellaswag_10
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hellaswag|10_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hellaswag|10_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-international_law|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-management|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-marketing|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-sociology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-virology|5_2023-10-11T06-17-39.611971.parquet'
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_abstract_algebra_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-abstract_algebra|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_anatomy_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-anatomy|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_astronomy_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-astronomy|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_business_ethics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-business_ethics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_clinical_knowledge_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_biology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_biology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_chemistry_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_computer_science_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_mathematics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_medicine_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_medicine|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_college_physics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-college_physics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_computer_security_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-computer_security|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_conceptual_physics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-conceptual_physics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_econometrics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-econometrics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_electrical_engineering_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-electrical_engineering|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_elementary_mathematics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_formal_logic_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-formal_logic|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_global_facts_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-global_facts|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_biology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_biology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_chemistry_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_computer_science_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_european_history_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_european_history|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_geography_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_geography|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_government_and_politics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_macroeconomics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_mathematics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_microeconomics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_physics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_physics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_psychology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_psychology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_statistics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_statistics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_us_history_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_us_history|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_high_school_world_history_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-high_school_world_history|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_human_aging_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_aging|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_human_sexuality_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-human_sexuality|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_international_law_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-international_law|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_jurisprudence_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-jurisprudence|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_logical_fallacies_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-logical_fallacies|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_machine_learning_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-machine_learning|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_management_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-management|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_marketing_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-marketing|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_medical_genetics_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_miscellaneous_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-miscellaneous|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_moral_disputes_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_disputes|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_moral_scenarios_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-moral_scenarios|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_nutrition_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-nutrition|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_philosophy_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-philosophy|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_prehistory_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-prehistory|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_professional_accounting_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_accounting|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_professional_law_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_law|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_professional_medicine_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_medicine|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_professional_psychology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-professional_psychology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_public_relations_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-public_relations|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_security_studies_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-security_studies|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_sociology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-sociology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_virology_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-virology|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_hendrycksTest_world_religions_5
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|hendrycksTest-world_religions|5_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_truthfulqa_mc_0
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-11T06-17-39.611971.parquet'
- split: latest
path:
- '**/details_harness|truthfulqa:mc|0_2023-10-11T06-17-39.611971.parquet'
- config_name: harness_winogrande_5
data_files:
- split: 2023_10_24T15_07_12.352820
path:
- '**/details_harness|winogrande|5_2023-10-24T15-07-12.352820.parquet'
- split: latest
path:
- '**/details_harness|winogrande|5_2023-10-24T15-07-12.352820.parquet'
- config_name: results
data_files:
- split: 2023_10_11T06_17_39.611971
path:
- results_2023-10-11T06-17-39.611971.parquet
- split: 2023_10_24T15_07_12.352820
path:
- results_2023-10-24T15-07-12.352820.parquet
- split: latest
path:
- results_2023-10-24T15-07-12.352820.parquet
---
# Dataset Card for Evaluation run of uukuguy/speechless-code-mistral-orca-7b-v1.0
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/uukuguy/speechless-code-mistral-orca-7b-v1.0
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [uukuguy/speechless-code-mistral-orca-7b-v1.0](https://huggingface.co/uukuguy/speechless-code-mistral-orca-7b-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 2 runs. Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.
An additional configuration "results" stores all the aggregated results of the runs (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0",
"harness_winogrande_5",
split="train")
```
## Latest results
These are the [latest results from run 2023-10-24T15:07:12.352820](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0/blob/main/results_2023-10-24T15-07-12.352820.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval):
```python
{
"all": {
"em": 0.4526006711409396,
"em_stderr": 0.005097407791242309,
"f1": 0.4989010067114103,
"f1_stderr": 0.004905672332696013,
"acc": 0.42884877867222604,
"acc_stderr": 0.009659566392137438
},
"harness|drop|3": {
"em": 0.4526006711409396,
"em_stderr": 0.005097407791242309,
"f1": 0.4989010067114103,
"f1_stderr": 0.004905672332696013
},
"harness|gsm8k|5": {
"acc": 0.08263836239575435,
"acc_stderr": 0.0075840892201481476
},
"harness|winogrande|5": {
"acc": 0.7750591949486977,
"acc_stderr": 0.01173504356412673
}
}
```
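The "results" configuration mentioned above can be loaded the same way to read the aggregated metrics directly (a minimal sketch, assuming only the `datasets` library; the exact column layout of the results files is not documented in this card):
```python
from datasets import load_dataset

# "latest" always points to the most recent run; timestamped splits keep earlier runs.
results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__speechless-code-mistral-orca-7b-v1.0",
    "results",
    split="latest",
)
print(results)
```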
### Supported Tasks and Leaderboards
[More Information Needed]
### Languages
[More Information Needed]
## Dataset Structure
### Data Instances
[More Information Needed]
### Data Fields
[More Information Needed]
### Data Splits
[More Information Needed]
## Dataset Creation
### Curation Rationale
[More Information Needed]
### Source Data
#### Initial Data Collection and Normalization
[More Information Needed]
#### Who are the source language producers?
[More Information Needed]
### Annotations
#### Annotation process
[More Information Needed]
#### Who are the annotators?
[More Information Needed]
### Personal and Sensitive Information
[More Information Needed]
## Considerations for Using the Data
### Social Impact of Dataset
[More Information Needed]
### Discussion of Biases
[More Information Needed]
### Other Known Limitations
[More Information Needed]
## Additional Information
### Dataset Curators
[More Information Needed]
### Licensing Information
[More Information Needed]
### Citation Information
[More Information Needed]
### Contributions
[More Information Needed] |
tasksource/blog_authorship_corpus | ---
license: apache-2.0
---
|
GEM-submissions/lewtun__this-is-another-test-name__1655985826 | ---
benchmark: gem
type: prediction
submission_name: This is another test name
tags:
- evaluation
- benchmark
---
# GEM Submission
Submission name: This is another test name
|
heliosprime/twitter_dataset_1713218513 | ---
dataset_info:
features:
- name: id
dtype: string
- name: tweet_content
dtype: string
- name: user_name
dtype: string
- name: user_id
dtype: string
- name: created_at
dtype: string
- name: url
dtype: string
- name: favourite_count
dtype: int64
- name: scraped_at
dtype: string
- name: image_urls
dtype: string
splits:
- name: train
num_bytes: 21337
num_examples: 60
download_size: 19759
dataset_size: 21337
configs:
- config_name: default
data_files:
- split: train
path: data/train-*
---
# Dataset Card for "twitter_dataset_1713218513"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |
Saxo/linkbricks_ko_dataset_stem_2 | ---
license: apache-2.0
---
|
ibm/Wish-QA-ELI5-Falcon | ---
dataset_info:
features:
- name: 'Unnamed: 0'
dtype: float64
- name: id
dtype: string
- name: title_question
dtype: string
- name: answer
dtype: string
- name: passage_1
dtype: string
- name: passage_2
dtype: string
- name: passage_3
dtype: string
- name: text
dtype: string
- name: qa
dtype: string
- name: question
dtype: string
- name: doc_score
dtype: float64
- name: score_qa
dtype: float64
- name: old_question
dtype: string
- name: old_answer
dtype: string
- name: ans_num_words
dtype: int64
- name: text_num_words
dtype: int64
- name: text_longer_1.5
dtype: int64
- name: input
dtype: string
- name: output 0 answer
dtype: string
splits:
- name: train
num_bytes: 53884366
num_examples: 10000
download_size: 32253671
dataset_size: 53884366
---
# Dataset Card for "Wish-QA-ELI5-Falcon"
[More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards) |