id
stringlengths
2
115
lastModified
stringlengths
24
24
tags
list
author
stringlengths
2
42
description
stringlengths
0
68.7k
citation
stringlengths
0
10.7k
cardData
null
likes
int64
0
3.55k
downloads
int64
0
10.1M
card
stringlengths
0
1.01M
stvnchnsn/data_chat_with_my_experience
2023-09-01T21:17:32.000Z
[ "region:us" ]
stvnchnsn
null
null
null
0
0
Entry not found
ITDSCON/webi
2023-09-01T21:55:01.000Z
[ "region:us" ]
ITDSCON
null
null
null
0
0
Entry not found
yzhuang/autotree_snnxor_n0_l2_2
2023-09-01T21:40:40.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float32 - name: input_y sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float32 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 371440000 num_examples: 10000 - name: validation num_bytes: 371440000 num_examples: 10000 - name: test num_bytes: 371440000 num_examples: 10000 download_size: 349774676 dataset_size: 1114320000 --- # Dataset Card for "autotree_snnxor_n0_l2_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
JWBickel/FalseGospels
2023-09-05T18:54:41.000Z
[ "region:us" ]
JWBickel
null
null
null
0
0
configs: - config_name: default data_files: - split: train path: "train.csv" - split: test path: "test.csv"
yzhuang/autotree_snnxor_n0_l1_2
2023-09-01T21:58:23.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float32 - name: input_y sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float32 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 123760000 num_examples: 10000 - name: validation num_bytes: 123760000 num_examples: 10000 - name: test num_bytes: 123760000 num_examples: 10000 download_size: 183536639 dataset_size: 371280000 --- # Dataset Card for "autotree_snnxor_n0_l1_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/autotree_snnxor_n30_l1_2
2023-09-05T18:39:38.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float32 - name: input_y sequence: sequence: float32 - name: input_y_clean sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float32 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 154520000 num_examples: 10000 - name: validation num_bytes: 154520000 num_examples: 10000 - name: test num_bytes: 154520000 num_examples: 10000 download_size: 185693440 dataset_size: 463560000 --- # Dataset Card for "autotree_snnxor_n30_l1_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/autotree_snnxor_n15_l2_2
2023-09-05T19:59:23.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float32 - name: input_y sequence: sequence: float32 - name: input_y_clean sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float32 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 402200000 num_examples: 10000 - name: validation num_bytes: 402200000 num_examples: 10000 - name: test num_bytes: 402200000 num_examples: 10000 download_size: 351932552 dataset_size: 1206600000 --- # Dataset Card for "autotree_snnxor_n15_l2_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
yzhuang/autotree_snnxor_n30_l2_2
2023-09-05T20:04:39.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float32 - name: input_y sequence: sequence: float32 - name: input_y_clean sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float32 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 402200000 num_examples: 10000 - name: validation num_bytes: 402200000 num_examples: 10000 - name: test num_bytes: 402200000 num_examples: 10000 download_size: 351933707 dataset_size: 1206600000 --- # Dataset Card for "autotree_snnxor_n30_l2_2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Roscall/MJW
2023-09-01T22:33:08.000Z
[ "region:us" ]
Roscall
null
null
null
0
0
Entry not found
gonzalobenegas/example_dataset
2023-09-01T23:23:17.000Z
[ "region:us" ]
gonzalobenegas
null
null
null
0
0
Entry not found
MrezaPRZ/GPT-fine-tuning-spider
2023-09-01T23:49:14.000Z
[ "license:apache-2.0", "region:us" ]
MrezaPRZ
null
null
null
0
0
--- license: apache-2.0 ---
Mateus1223445/devilmel
2023-09-02T00:06:28.000Z
[ "region:us" ]
Mateus1223445
null
null
null
0
0
Entry not found
open-llm-leaderboard/details_uukuguy__speechless-llama2-hermes-orca-platypus-wizardlm-13b
2023-09-12T15:48:20.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b](https://huggingface.co/uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-llama2-hermes-orca-platypus-wizardlm-13b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T00:07:11.850382](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-llama2-hermes-orca-platypus-wizardlm-13b/blob/main/results_2023-09-02T00%3A07%3A11.850382.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5833895996915851,\n\ \ \"acc_stderr\": 0.03412386866499071,\n \"acc_norm\": 0.5873500122940513,\n\ \ \"acc_norm_stderr\": 0.03410341562269808,\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5599582588635479,\n\ \ \"mc2_stderr\": 0.015546326272126147\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5656996587030717,\n \"acc_stderr\": 0.014484703048857357,\n\ \ \"acc_norm\": 0.5964163822525598,\n \"acc_norm_stderr\": 0.014337158914268448\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6240788687512447,\n\ \ \"acc_stderr\": 0.004833699243292347,\n \"acc_norm\": 0.8270264887472615,\n\ \ \"acc_norm_stderr\": 0.003774513882615949\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5407407407407407,\n\ \ \"acc_stderr\": 0.04304979692464242,\n \"acc_norm\": 0.5407407407407407,\n\ \ \"acc_norm_stderr\": 0.04304979692464242\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5855263157894737,\n \"acc_stderr\": 0.04008973785779206,\n\ \ \"acc_norm\": 0.5855263157894737,\n \"acc_norm_stderr\": 0.04008973785779206\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n\ \ \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6264150943396226,\n \"acc_stderr\": 0.02977308271331987,\n\ \ \"acc_norm\": 0.6264150943396226,\n \"acc_norm_stderr\": 0.02977308271331987\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n\ \ \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n\ \ \"acc_norm_stderr\": 0.039621355734862175\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n\ \ \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5491329479768786,\n\ \ \"acc_stderr\": 0.037940126746970296,\n \"acc_norm\": 0.5491329479768786,\n\ \ \"acc_norm_stderr\": 0.037940126746970296\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.04440521906179328,\n\ \ \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.04440521906179328\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.502127659574468,\n \"acc_stderr\": 0.03268572658667492,\n\ \ \"acc_norm\": 0.502127659574468,\n \"acc_norm_stderr\": 0.03268572658667492\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2807017543859649,\n\ \ \"acc_stderr\": 0.042270544512322,\n \"acc_norm\": 0.2807017543859649,\n\ \ \"acc_norm_stderr\": 0.042270544512322\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n\ \ \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.35185185185185186,\n \"acc_stderr\": 0.024594975128920938,\n \"\ acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 
0.024594975128920938\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\ \ \"acc_stderr\": 0.04375888492727061,\n \"acc_norm\": 0.3968253968253968,\n\ \ \"acc_norm_stderr\": 0.04375888492727061\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.667741935483871,\n \"acc_stderr\": 0.026795560848122794,\n \"\ acc_norm\": 0.667741935483871,\n \"acc_norm_stderr\": 0.026795560848122794\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"\ acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\"\ : 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.03546563019624336,\n\ \ \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.03546563019624336\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7474747474747475,\n \"acc_stderr\": 0.030954055470365897,\n \"\ acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.030954055470365897\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.025416343096306433,\n\ \ \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.025416343096306433\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5871794871794872,\n \"acc_stderr\": 0.024962683564331806,\n\ \ \"acc_norm\": 0.5871794871794872,\n \"acc_norm_stderr\": 0.024962683564331806\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.32592592592592595,\n \"acc_stderr\": 0.028578348365473075,\n \ \ \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.028578348365473075\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552379,\n \ \ \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552379\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7889908256880734,\n\ \ \"acc_stderr\": 0.01749392240411265,\n \"acc_norm\": 0.7889908256880734,\n\ \ \"acc_norm_stderr\": 0.01749392240411265\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.4074074074074074,\n \"acc_stderr\": 0.03350991604696043,\n\ \ \"acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.03350991604696043\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"\ acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7679324894514767,\n \"acc_stderr\": 0.027479744550808514,\n \ \ \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.027479744550808514\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n\ \ \"acc_stderr\": 0.03138147637575498,\n \"acc_norm\": 0.6771300448430493,\n\ \ \"acc_norm_stderr\": 0.03138147637575498\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\ \ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"\ acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n\ \ \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.7592592592592593,\n\ \ \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n\ \ \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\ \ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \ \ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n\ \ \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n\ \ \"acc_stderr\": 0.025140935950335445,\n \"acc_norm\": 0.8205128205128205,\n\ \ \"acc_norm_stderr\": 0.025140935950335445\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \ \ \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7650063856960408,\n\ \ \"acc_stderr\": 0.015162024152278446,\n \"acc_norm\": 0.7650063856960408,\n\ \ \"acc_norm_stderr\": 0.015162024152278446\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.02541600377316555,\n\ \ \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.02541600377316555\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41787709497206704,\n\ \ \"acc_stderr\": 0.01649540063582008,\n \"acc_norm\": 0.41787709497206704,\n\ \ 
\"acc_norm_stderr\": 0.01649540063582008\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.02736359328468497,\n\ \ \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.02736359328468497\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6430868167202572,\n\ \ \"acc_stderr\": 0.027210420375934023,\n \"acc_norm\": 0.6430868167202572,\n\ \ \"acc_norm_stderr\": 0.027210420375934023\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6481481481481481,\n \"acc_stderr\": 0.026571483480719964,\n\ \ \"acc_norm\": 0.6481481481481481,\n \"acc_norm_stderr\": 0.026571483480719964\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4574468085106383,\n \"acc_stderr\": 0.02971928127223684,\n \ \ \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.02971928127223684\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4406779661016949,\n\ \ \"acc_stderr\": 0.012680037994097067,\n \"acc_norm\": 0.4406779661016949,\n\ \ \"acc_norm_stderr\": 0.012680037994097067\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5882352941176471,\n \"acc_stderr\": 0.02989616303312547,\n\ \ \"acc_norm\": 0.5882352941176471,\n \"acc_norm_stderr\": 0.02989616303312547\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5800653594771242,\n \"acc_stderr\": 0.019966811178256483,\n \ \ \"acc_norm\": 0.5800653594771242,\n \"acc_norm_stderr\": 0.019966811178256483\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n\ \ \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n\ \ \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6489795918367347,\n \"acc_stderr\": 0.03055531675557364,\n\ \ \"acc_norm\": 0.6489795918367347,\n \"acc_norm_stderr\": 0.03055531675557364\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7611940298507462,\n\ \ \"acc_stderr\": 0.03014777593540922,\n \"acc_norm\": 0.7611940298507462,\n\ \ \"acc_norm_stderr\": 0.03014777593540922\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \ \ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n\ \ \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n\ \ \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.03274485211946956,\n\ \ \"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.03274485211946956\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5599582588635479,\n\ \ \"mc2_stderr\": 0.015546326272126147\n }\n}\n```" repo_url: https://huggingface.co/uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|arc:challenge|25_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hellaswag|10_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T00:07:11.850382.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T00:07:11.850382.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T00:07:11.850382.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T00:07:11.850382.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T00_07_11.850382 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T00:07:11.850382.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T00:07:11.850382.parquet' - config_name: results data_files: - split: 2023_09_02T00_07_11.850382 path: - results_2023-09-02T00:07:11.850382.parquet - split: latest path: - results_2023-09-02T00:07:11.850382.parquet --- # Dataset Card for Evaluation run of uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the 
evaluation run of model [uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b](https://huggingface.co/uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-llama2-hermes-orca-platypus-wizardlm-13b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T00:07:11.850382](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-llama2-hermes-orca-platypus-wizardlm-13b/blob/main/results_2023-09-02T00%3A07%3A11.850382.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5833895996915851, "acc_stderr": 0.03412386866499071, "acc_norm": 0.5873500122940513, "acc_norm_stderr": 0.03410341562269808, "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5599582588635479, "mc2_stderr": 0.015546326272126147 }, "harness|arc:challenge|25": { "acc": 0.5656996587030717, "acc_stderr": 0.014484703048857357, "acc_norm": 0.5964163822525598, "acc_norm_stderr": 0.014337158914268448 }, "harness|hellaswag|10": { "acc": 0.6240788687512447, "acc_stderr": 0.004833699243292347, "acc_norm": 0.8270264887472615, "acc_norm_stderr": 0.003774513882615949 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5407407407407407, "acc_stderr": 0.04304979692464242, "acc_norm": 0.5407407407407407, "acc_norm_stderr": 0.04304979692464242 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5855263157894737, "acc_stderr": 0.04008973785779206, "acc_norm": 0.5855263157894737, "acc_norm_stderr": 0.04008973785779206 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6264150943396226, "acc_stderr": 0.02977308271331987, "acc_norm": 0.6264150943396226, "acc_norm_stderr": 0.02977308271331987 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5491329479768786, "acc_stderr": 0.037940126746970296, "acc_norm": 0.5491329479768786, "acc_norm_stderr": 0.037940126746970296 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179328, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179328 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.502127659574468, "acc_stderr": 0.03268572658667492, "acc_norm": 0.502127659574468, "acc_norm_stderr": 0.03268572658667492 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2807017543859649, "acc_stderr": 0.042270544512322, "acc_norm": 0.2807017543859649, "acc_norm_stderr": 0.042270544512322 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.024594975128920938, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.024594975128920938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.04375888492727061, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.04375888492727061 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.667741935483871, "acc_stderr": 0.026795560848122794, "acc_norm": 0.667741935483871, "acc_norm_stderr": 0.026795560848122794 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7090909090909091, "acc_stderr": 0.03546563019624336, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.03546563019624336 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.030954055470365897, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.030954055470365897 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.025416343096306433, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.025416343096306433 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5871794871794872, "acc_stderr": 0.024962683564331806, "acc_norm": 0.5871794871794872, "acc_norm_stderr": 0.024962683564331806 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.028578348365473075, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.028578348365473075 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552379, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552379 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7889908256880734, "acc_stderr": 0.01749392240411265, "acc_norm": 0.7889908256880734, "acc_norm_stderr": 0.01749392240411265 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4074074074074074, "acc_stderr": 
0.03350991604696043, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.03350991604696043 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.027479744550808514, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.027479744550808514 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575498, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575498 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243839, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.025140935950335445, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.025140935950335445 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 
0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7650063856960408, "acc_stderr": 0.015162024152278446, "acc_norm": 0.7650063856960408, "acc_norm_stderr": 0.015162024152278446 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6647398843930635, "acc_stderr": 0.02541600377316555, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.02541600377316555 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.41787709497206704, "acc_stderr": 0.01649540063582008, "acc_norm": 0.41787709497206704, "acc_norm_stderr": 0.01649540063582008 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6470588235294118, "acc_stderr": 0.02736359328468497, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.02736359328468497 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6430868167202572, "acc_stderr": 0.027210420375934023, "acc_norm": 0.6430868167202572, "acc_norm_stderr": 0.027210420375934023 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6481481481481481, "acc_stderr": 0.026571483480719964, "acc_norm": 0.6481481481481481, "acc_norm_stderr": 0.026571483480719964 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.02971928127223684, "acc_norm": 0.4574468085106383, "acc_norm_stderr": 0.02971928127223684 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4406779661016949, "acc_stderr": 0.012680037994097067, "acc_norm": 0.4406779661016949, "acc_norm_stderr": 0.012680037994097067 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5882352941176471, "acc_stderr": 0.02989616303312547, "acc_norm": 0.5882352941176471, "acc_norm_stderr": 0.02989616303312547 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5800653594771242, "acc_stderr": 0.019966811178256483, "acc_norm": 0.5800653594771242, "acc_norm_stderr": 0.019966811178256483 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 
0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6489795918367347, "acc_stderr": 0.03055531675557364, "acc_norm": 0.6489795918367347, "acc_norm_stderr": 0.03055531675557364 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7611940298507462, "acc_stderr": 0.03014777593540922, "acc_norm": 0.7611940298507462, "acc_norm_stderr": 0.03014777593540922 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.038899512528272166, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.03274485211946956, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.03274485211946956 }, "harness|truthfulqa:mc|0": { "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5599582588635479, "mc2_stderr": 0.015546326272126147 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
yzhuang/autotree_automl_house_16H_gosdt_l512_d3_sd3
2023-09-02T00:30:55.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float64 - name: input_y sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float64 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 9224800000 num_examples: 100000 - name: validation num_bytes: 922480000 num_examples: 10000 download_size: 3198840988 dataset_size: 10147280000 --- # Dataset Card for "autotree_automl_house_16H_gosdt_l512_d3_sd3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
open-llm-leaderboard/details_acrastt__Bean-3B
2023-09-02T01:15:16.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of acrastt/Bean-3B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [acrastt/Bean-3B](https://huggingface.co/acrastt/Bean-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_acrastt__Bean-3B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T01:13:58.991012](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Bean-3B/blob/main/results_2023-09-02T01%3A13%3A58.991012.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2706803436804621,\n\ \ \"acc_stderr\": 0.03202855888486129,\n \"acc_norm\": 0.2743721240247319,\n\ \ \"acc_norm_stderr\": 0.032024224713521385,\n \"mc1\": 0.2386780905752754,\n\ \ \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.3611141228461351,\n\ \ \"mc2_stderr\": 0.01348753730197199\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.36860068259385664,\n \"acc_stderr\": 0.014097810678042184,\n\ \ \"acc_norm\": 0.4035836177474403,\n \"acc_norm_stderr\": 0.01433715891426844\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5371439952200757,\n\ \ \"acc_stderr\": 0.0049759937955620335,\n \"acc_norm\": 0.7199761003784106,\n\ \ \"acc_norm_stderr\": 0.004480929450281564\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n\ \ \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n\ \ \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.27631578947368424,\n \"acc_stderr\": 0.03639057569952924,\n\ \ \"acc_norm\": 0.27631578947368424,\n \"acc_norm_stderr\": 0.03639057569952924\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.26,\n\ \ \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.26,\n \ \ \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.29056603773584905,\n \"acc_stderr\": 0.02794321998933713,\n\ \ \"acc_norm\": 0.29056603773584905,\n \"acc_norm_stderr\": 0.02794321998933713\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 
0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.19,\n \"acc_stderr\": 0.03942772444036623,\n \ \ \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.03942772444036623\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n\ \ \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.19653179190751446,\n\ \ \"acc_stderr\": 0.030299574664788147,\n \"acc_norm\": 0.19653179190751446,\n\ \ \"acc_norm_stderr\": 0.030299574664788147\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.17647058823529413,\n \"acc_stderr\": 0.03793281185307811,\n\ \ \"acc_norm\": 0.17647058823529413,\n \"acc_norm_stderr\": 0.03793281185307811\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n\ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.31063829787234043,\n \"acc_stderr\": 0.030251237579213174,\n\ \ \"acc_norm\": 0.31063829787234043,\n \"acc_norm_stderr\": 0.030251237579213174\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n\ \ \"acc_stderr\": 0.03947152782669416,\n \"acc_norm\": 0.22807017543859648,\n\ \ \"acc_norm_stderr\": 0.03947152782669416\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.22758620689655173,\n \"acc_stderr\": 0.03493950380131184,\n\ \ \"acc_norm\": 0.22758620689655173,\n \"acc_norm_stderr\": 0.03493950380131184\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2698412698412698,\n \"acc_stderr\": 0.022860838309232072,\n \"\ acc_norm\": 
0.2698412698412698,\n \"acc_norm_stderr\": 0.022860838309232072\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n\ \ \"acc_stderr\": 0.038095238095238106,\n \"acc_norm\": 0.23809523809523808,\n\ \ \"acc_norm_stderr\": 0.038095238095238106\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2709677419354839,\n\ \ \"acc_stderr\": 0.025284416114900156,\n \"acc_norm\": 0.2709677419354839,\n\ \ \"acc_norm_stderr\": 0.025284416114900156\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.03194740072265541,\n\ \ \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.03194740072265541\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\"\ : 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.033175059300091805,\n\ \ \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.033175059300091805\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.23737373737373738,\n \"acc_stderr\": 0.03031371053819889,\n \"\ acc_norm\": 0.23737373737373738,\n \"acc_norm_stderr\": 0.03031371053819889\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.22797927461139897,\n \"acc_stderr\": 0.030276909945178274,\n\ \ \"acc_norm\": 0.22797927461139897,\n \"acc_norm_stderr\": 0.030276909945178274\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2358974358974359,\n \"acc_stderr\": 0.02152596540740872,\n \ \ \"acc_norm\": 0.2358974358974359,\n \"acc_norm_stderr\": 0.02152596540740872\n\ \ 
},\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712163,\n \ \ \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712163\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.02865749128507199,\n \ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.02865749128507199\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.26238532110091745,\n\ \ \"acc_stderr\": 0.01886188502153473,\n \"acc_norm\": 0.26238532110091745,\n\ \ \"acc_norm_stderr\": 0.01886188502153473\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.18981481481481483,\n \"acc_stderr\": 0.026744714834691936,\n\ \ \"acc_norm\": 0.18981481481481483,\n \"acc_norm_stderr\": 0.026744714834691936\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.22058823529411764,\n \"acc_stderr\": 0.029102254389674082,\n \"\ acc_norm\": 0.22058823529411764,\n \"acc_norm_stderr\": 0.029102254389674082\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.24472573839662448,\n \"acc_stderr\": 0.027985699387036423,\n \ \ \"acc_norm\": 0.24472573839662448,\n \"acc_norm_stderr\": 0.027985699387036423\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4080717488789238,\n\ \ \"acc_stderr\": 0.03298574607842821,\n \"acc_norm\": 0.4080717488789238,\n\ \ \"acc_norm_stderr\": 0.03298574607842821\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306086,\n\ \ \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306086\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.35537190082644626,\n \"acc_stderr\": 0.04369236326573981,\n \"\ acc_norm\": 0.35537190082644626,\n \"acc_norm_stderr\": 0.04369236326573981\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.3055555555555556,\n\ \ \"acc_stderr\": 0.04453197507374984,\n \"acc_norm\": 0.3055555555555556,\n\ \ \"acc_norm_stderr\": 0.04453197507374984\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.033519538795212696,\n\ \ \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.033519538795212696\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467763,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467763\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.30097087378640774,\n \"acc_stderr\": 0.04541609446503947,\n\ \ \"acc_norm\": 0.30097087378640774,\n \"acc_norm_stderr\": 0.04541609446503947\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2692307692307692,\n\ \ \"acc_stderr\": 0.029058588303748842,\n \"acc_norm\": 0.2692307692307692,\n\ \ \"acc_norm_stderr\": 0.029058588303748842\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.3052362707535121,\n\ \ \"acc_stderr\": 0.01646771194763513,\n \"acc_norm\": 0.3052362707535121,\n\ \ \"acc_norm_stderr\": 0.01646771194763513\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.2861271676300578,\n \"acc_stderr\": 0.024332146779134124,\n\ \ \"acc_norm\": 0.2861271676300578,\n \"acc_norm_stderr\": 0.024332146779134124\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23687150837988827,\n\ \ \"acc_stderr\": 0.014219570788103982,\n 
\"acc_norm\": 0.23687150837988827,\n\ \ \"acc_norm_stderr\": 0.014219570788103982\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.025261691219729484,\n\ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.025261691219729484\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.29260450160771706,\n\ \ \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.29260450160771706,\n\ \ \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.26851851851851855,\n \"acc_stderr\": 0.024659685185967277,\n\ \ \"acc_norm\": 0.26851851851851855,\n \"acc_norm_stderr\": 0.024659685185967277\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2730496453900709,\n \"acc_stderr\": 0.026577860943307857,\n \ \ \"acc_norm\": 0.2730496453900709,\n \"acc_norm_stderr\": 0.026577860943307857\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.242503259452412,\n\ \ \"acc_stderr\": 0.010946570966348768,\n \"acc_norm\": 0.242503259452412,\n\ \ \"acc_norm_stderr\": 0.010946570966348768\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.024060599423487414,\n\ \ \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.024060599423487414\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.272875816993464,\n \"acc_stderr\": 0.01802047414839358,\n \ \ \"acc_norm\": 0.272875816993464,\n \"acc_norm_stderr\": 0.01802047414839358\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.33636363636363636,\n\ \ \"acc_stderr\": 0.04525393596302505,\n \"acc_norm\": 0.33636363636363636,\n\ \ \"acc_norm_stderr\": 0.04525393596302505\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.3142857142857143,\n \"acc_stderr\": 0.029719329422417468,\n\ \ \"acc_norm\": 0.3142857142857143,\n \"acc_norm_stderr\": 0.029719329422417468\n\ 
\ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2736318407960199,\n\ \ \"acc_stderr\": 0.031524391865554016,\n \"acc_norm\": 0.2736318407960199,\n\ \ \"acc_norm_stderr\": 0.031524391865554016\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3192771084337349,\n\ \ \"acc_stderr\": 0.0362933532994786,\n \"acc_norm\": 0.3192771084337349,\n\ \ \"acc_norm_stderr\": 0.0362933532994786\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.28654970760233917,\n \"acc_stderr\": 0.034678266857038266,\n\ \ \"acc_norm\": 0.28654970760233917,\n \"acc_norm_stderr\": 0.034678266857038266\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2386780905752754,\n\ \ \"mc1_stderr\": 0.014922629695456418,\n \"mc2\": 0.3611141228461351,\n\ \ \"mc2_stderr\": 0.01348753730197199\n }\n}\n```" repo_url: https://huggingface.co/acrastt/Bean-3B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|arc:challenge|25_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hellaswag|10_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:13:58.991012.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:13:58.991012.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:13:58.991012.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:13:58.991012.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T01_13_58.991012 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T01:13:58.991012.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T01:13:58.991012.parquet' - config_name: results data_files: - split: 2023_09_02T01_13_58.991012 path: - results_2023-09-02T01:13:58.991012.parquet - split: latest path: - results_2023-09-02T01:13:58.991012.parquet --- # Dataset Card for Evaluation run of acrastt/Bean-3B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/acrastt/Bean-3B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [acrastt/Bean-3B](https://huggingface.co/acrastt/Bean-3B) on the 
[Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_acrastt__Bean-3B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T01:13:58.991012](https://huggingface.co/datasets/open-llm-leaderboard/details_acrastt__Bean-3B/blob/main/results_2023-09-02T01%3A13%3A58.991012.json)(note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2706803436804621, "acc_stderr": 0.03202855888486129, "acc_norm": 0.2743721240247319, "acc_norm_stderr": 0.032024224713521385, "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.3611141228461351, "mc2_stderr": 0.01348753730197199 }, "harness|arc:challenge|25": { "acc": 0.36860068259385664, "acc_stderr": 0.014097810678042184, "acc_norm": 0.4035836177474403, "acc_norm_stderr": 0.01433715891426844 }, "harness|hellaswag|10": { "acc": 0.5371439952200757, "acc_stderr": 0.0049759937955620335, "acc_norm": 0.7199761003784106, "acc_norm_stderr": 0.004480929450281564 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.27631578947368424, "acc_stderr": 0.03639057569952924, "acc_norm": 0.27631578947368424, "acc_norm_stderr": 0.03639057569952924 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.29056603773584905, "acc_stderr": 0.02794321998933713, "acc_norm": 0.29056603773584905, "acc_norm_stderr": 0.02794321998933713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.03476590104304134, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.19, "acc_stderr": 0.03942772444036623, "acc_norm": 0.19, "acc_norm_stderr": 0.03942772444036623 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, 
"acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.19653179190751446, "acc_stderr": 0.030299574664788147, "acc_norm": 0.19653179190751446, "acc_norm_stderr": 0.030299574664788147 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.17647058823529413, "acc_stderr": 0.03793281185307811, "acc_norm": 0.17647058823529413, "acc_norm_stderr": 0.03793281185307811 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.31063829787234043, "acc_stderr": 0.030251237579213174, "acc_norm": 0.31063829787234043, "acc_norm_stderr": 0.030251237579213174 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669416, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669416 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.22758620689655173, "acc_stderr": 0.03493950380131184, "acc_norm": 0.22758620689655173, "acc_norm_stderr": 0.03493950380131184 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.022860838309232072, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.022860838309232072 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.038095238095238106, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238106 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2709677419354839, "acc_stderr": 0.025284416114900156, "acc_norm": 0.2709677419354839, "acc_norm_stderr": 0.025284416114900156 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.03194740072265541, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.03194740072265541 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.23636363636363636, "acc_stderr": 0.033175059300091805, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.033175059300091805 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.23737373737373738, "acc_stderr": 0.03031371053819889, "acc_norm": 0.23737373737373738, "acc_norm_stderr": 0.03031371053819889 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178274, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178274 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2358974358974359, "acc_stderr": 0.02152596540740872, "acc_norm": 0.2358974358974359, "acc_norm_stderr": 0.02152596540740872 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712163, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712163 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.02865749128507199, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.02865749128507199 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.26238532110091745, "acc_stderr": 0.01886188502153473, "acc_norm": 0.26238532110091745, "acc_norm_stderr": 0.01886188502153473 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.18981481481481483, "acc_stderr": 
0.026744714834691936, "acc_norm": 0.18981481481481483, "acc_norm_stderr": 0.026744714834691936 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22058823529411764, "acc_stderr": 0.029102254389674082, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.029102254389674082 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036423, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.4080717488789238, "acc_stderr": 0.03298574607842821, "acc_norm": 0.4080717488789238, "acc_norm_stderr": 0.03298574607842821 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.35537190082644626, "acc_stderr": 0.04369236326573981, "acc_norm": 0.35537190082644626, "acc_norm_stderr": 0.04369236326573981 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.3055555555555556, "acc_stderr": 0.04453197507374984, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.04453197507374984 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.033519538795212696, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467763, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467763 }, "harness|hendrycksTest-management|5": { "acc": 0.30097087378640774, "acc_stderr": 0.04541609446503947, "acc_norm": 0.30097087378640774, "acc_norm_stderr": 0.04541609446503947 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2692307692307692, "acc_stderr": 0.029058588303748842, "acc_norm": 0.2692307692307692, "acc_norm_stderr": 0.029058588303748842 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.22, 
"acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.3052362707535121, "acc_stderr": 0.01646771194763513, "acc_norm": 0.3052362707535121, "acc_norm_stderr": 0.01646771194763513 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2861271676300578, "acc_stderr": 0.024332146779134124, "acc_norm": 0.2861271676300578, "acc_norm_stderr": 0.024332146779134124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23687150837988827, "acc_stderr": 0.014219570788103982, "acc_norm": 0.23687150837988827, "acc_norm_stderr": 0.014219570788103982 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2647058823529412, "acc_stderr": 0.025261691219729484, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.025261691219729484 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.29260450160771706, "acc_stderr": 0.02583989833487798, "acc_norm": 0.29260450160771706, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.26851851851851855, "acc_stderr": 0.024659685185967277, "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.024659685185967277 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2730496453900709, "acc_stderr": 0.026577860943307857, "acc_norm": 0.2730496453900709, "acc_norm_stderr": 0.026577860943307857 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.242503259452412, "acc_stderr": 0.010946570966348768, "acc_norm": 0.242503259452412, "acc_norm_stderr": 0.010946570966348768 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.1948529411764706, "acc_stderr": 0.024060599423487414, "acc_norm": 0.1948529411764706, "acc_norm_stderr": 0.024060599423487414 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.272875816993464, "acc_stderr": 0.01802047414839358, "acc_norm": 0.272875816993464, "acc_norm_stderr": 0.01802047414839358 }, "harness|hendrycksTest-public_relations|5": { "acc": 
0.33636363636363636, "acc_stderr": 0.04525393596302505, "acc_norm": 0.33636363636363636, "acc_norm_stderr": 0.04525393596302505 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3142857142857143, "acc_stderr": 0.029719329422417468, "acc_norm": 0.3142857142857143, "acc_norm_stderr": 0.029719329422417468 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2736318407960199, "acc_stderr": 0.031524391865554016, "acc_norm": 0.2736318407960199, "acc_norm_stderr": 0.031524391865554016 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.3192771084337349, "acc_stderr": 0.0362933532994786, "acc_norm": 0.3192771084337349, "acc_norm_stderr": 0.0362933532994786 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.28654970760233917, "acc_stderr": 0.034678266857038266, "acc_norm": 0.28654970760233917, "acc_norm_stderr": 0.034678266857038266 }, "harness|truthfulqa:mc|0": { "mc1": 0.2386780905752754, "mc1_stderr": 0.014922629695456418, "mc2": 0.3611141228461351, "mc2_stderr": 0.01348753730197199 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
Roscall/Rickastley50
2023-09-02T01:29:33.000Z
[ "region:us" ]
Roscall
null
null
null
0
0
Entry not found
yzhuang/autotree_automl_house_16H_gosdt_l512_d3_sd2
2023-09-02T01:38:21.000Z
[ "region:us" ]
yzhuang
null
null
null
0
0
--- dataset_info: features: - name: id dtype: int64 - name: input_x sequence: sequence: float64 - name: input_y sequence: sequence: float32 - name: rtg sequence: float64 - name: status sequence: sequence: float32 - name: split_threshold sequence: sequence: float64 - name: split_dimension sequence: int64 splits: - name: train num_bytes: 9224800000 num_examples: 100000 - name: validation num_bytes: 922480000 num_examples: 10000 download_size: 3195628749 dataset_size: 10147280000 --- # Dataset Card for "autotree_automl_house_16H_gosdt_l512_d3_sd2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
open-llm-leaderboard/details_Devio__test-22B
2023-09-02T01:40:10.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of Devio/test-22B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Devio/test-22B](https://huggingface.co/Devio/test-22B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Devio__test-22B\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T01:38:52.675251](https://huggingface.co/datasets/open-llm-leaderboard/details_Devio__test-22B/blob/main/results_2023-09-02T01%3A38%3A52.675251.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2758923391191029,\n\ \ \"acc_stderr\": 0.03201388252201146,\n \"acc_norm\": 0.2797296584858208,\n\ \ \"acc_norm_stderr\": 0.032017183492893266,\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.3713156999785537,\n\ \ \"mc2_stderr\": 0.014209414703476026\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.3438566552901024,\n \"acc_stderr\": 0.01388064457015621,\n\ \ \"acc_norm\": 0.39419795221843,\n \"acc_norm_stderr\": 0.014280522667467325\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4690300736904999,\n\ \ \"acc_stderr\": 0.004980200451851677,\n \"acc_norm\": 0.6450906193985262,\n\ \ \"acc_norm_stderr\": 0.004775079636567092\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.22962962962962963,\n\ \ \"acc_stderr\": 0.03633384414073462,\n \"acc_norm\": 0.22962962962962963,\n\ \ \"acc_norm_stderr\": 0.03633384414073462\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.32894736842105265,\n \"acc_stderr\": 0.03823428969926604,\n\ \ \"acc_norm\": 0.32894736842105265,\n \"acc_norm_stderr\": 0.03823428969926604\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.17,\n\ \ \"acc_stderr\": 0.0377525168068637,\n \"acc_norm\": 0.17,\n \ \ \"acc_norm_stderr\": 0.0377525168068637\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.2981132075471698,\n \"acc_stderr\": 0.028152837942493854,\n\ \ \"acc_norm\": 0.2981132075471698,\n \"acc_norm_stderr\": 0.028152837942493854\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.24305555555555555,\n\ \ \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.24305555555555555,\n\ \ \"acc_norm_stderr\": 0.03586879280080341\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3063583815028902,\n\ \ \"acc_stderr\": 0.03514942551267438,\n \"acc_norm\": 0.3063583815028902,\n\ \ \"acc_norm_stderr\": 0.03514942551267438\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.045338381959297736,\n\ \ \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.045338381959297736\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.21,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.21,\n\ \ \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.28936170212765955,\n \"acc_stderr\": 0.02964400657700962,\n\ \ \"acc_norm\": 0.28936170212765955,\n \"acc_norm_stderr\": 0.02964400657700962\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n\ \ \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2698412698412698,\n \"acc_stderr\": 0.022860838309232072,\n \"\ acc_norm\": 0.2698412698412698,\n 
\"acc_norm_stderr\": 0.022860838309232072\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23015873015873015,\n\ \ \"acc_stderr\": 0.03764950879790606,\n \"acc_norm\": 0.23015873015873015,\n\ \ \"acc_norm_stderr\": 0.03764950879790606\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.33225806451612905,\n \"acc_stderr\": 0.026795560848122797,\n \"\ acc_norm\": 0.33225806451612905,\n \"acc_norm_stderr\": 0.026795560848122797\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.2955665024630542,\n \"acc_stderr\": 0.032104944337514575,\n \"\ acc_norm\": 0.2955665024630542,\n \"acc_norm_stderr\": 0.032104944337514575\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\"\ : 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.24848484848484848,\n \"acc_stderr\": 0.03374402644139404,\n\ \ \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.03374402644139404\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.35353535353535354,\n \"acc_stderr\": 0.03406086723547153,\n \"\ acc_norm\": 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.35233160621761656,\n \"acc_stderr\": 0.03447478286414359,\n\ \ \"acc_norm\": 0.35233160621761656,\n \"acc_norm_stderr\": 0.03447478286414359\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.3564102564102564,\n \"acc_stderr\": 0.024283140529467295,\n\ \ \"acc_norm\": 0.3564102564102564,\n \"acc_norm_stderr\": 0.024283140529467295\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \ \ \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.031041941304059288,\n\ \ \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.031041941304059288\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242741,\n \"\ acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242741\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3486238532110092,\n \"acc_stderr\": 0.020431254090714324,\n \"\ acc_norm\": 0.3486238532110092,\n \"acc_norm_stderr\": 0.020431254090714324\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.3888888888888889,\n \"acc_stderr\": 0.03324708911809117,\n \"\ acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.03324708911809117\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25980392156862747,\n \"acc_stderr\": 0.03077855467869326,\n \"\ acc_norm\": 0.25980392156862747,\n \"acc_norm_stderr\": 0.03077855467869326\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.20253164556962025,\n \"acc_stderr\": 0.026160568246601457,\n \ \ \"acc_norm\": 0.20253164556962025,\n \"acc_norm_stderr\": 0.026160568246601457\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.1210762331838565,\n\ \ \"acc_stderr\": 0.021894174113185737,\n \"acc_norm\": 0.1210762331838565,\n\ \ \"acc_norm_stderr\": 0.021894174113185737\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.32061068702290074,\n \"acc_stderr\": 0.04093329229834277,\n\ \ \"acc_norm\": 0.32061068702290074,\n \"acc_norm_stderr\": 0.04093329229834277\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.17355371900826447,\n \"acc_stderr\": 0.03457272836917671,\n \"\ acc_norm\": 0.17355371900826447,\n \"acc_norm_stderr\": 0.03457272836917671\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2222222222222222,\n\ \ \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.2331288343558282,\n \"acc_stderr\": 0.033220157957767414,\n\ \ \"acc_norm\": 0.2331288343558282,\n \"acc_norm_stderr\": 0.033220157957767414\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.16964285714285715,\n\ \ \"acc_stderr\": 0.03562367850095391,\n \"acc_norm\": 0.16964285714285715,\n\ \ \"acc_norm_stderr\": 0.03562367850095391\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.3786407766990291,\n \"acc_stderr\": 0.04802694698258972,\n\ \ \"acc_norm\": 0.3786407766990291,\n \"acc_norm_stderr\": 0.04802694698258972\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.19658119658119658,\n\ \ \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.19658119658119658,\n\ \ \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \ \ \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.21839080459770116,\n\ \ \"acc_stderr\": 0.0147743583199345,\n \"acc_norm\": 0.21839080459770116,\n\ \ \"acc_norm_stderr\": 0.0147743583199345\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.21676300578034682,\n \"acc_stderr\": 0.02218347766841286,\n\ \ \"acc_norm\": 0.21676300578034682,\n \"acc_norm_stderr\": 0.02218347766841286\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24134078212290502,\n\ \ \"acc_stderr\": 0.014310999547961459,\n 
\"acc_norm\": 0.24134078212290502,\n\ \ \"acc_norm_stderr\": 0.014310999547961459\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.29411764705882354,\n \"acc_stderr\": 0.026090162504279053,\n\ \ \"acc_norm\": 0.29411764705882354,\n \"acc_norm_stderr\": 0.026090162504279053\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2604501607717042,\n\ \ \"acc_stderr\": 0.02492672322484554,\n \"acc_norm\": 0.2604501607717042,\n\ \ \"acc_norm_stderr\": 0.02492672322484554\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.22839506172839505,\n \"acc_stderr\": 0.023358211840626267,\n\ \ \"acc_norm\": 0.22839506172839505,\n \"acc_norm_stderr\": 0.023358211840626267\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590624,\n \ \ \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590624\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24511082138200782,\n\ \ \"acc_stderr\": 0.010986307870045517,\n \"acc_norm\": 0.24511082138200782,\n\ \ \"acc_norm_stderr\": 0.010986307870045517\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n\ \ \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.2173202614379085,\n \"acc_stderr\": 0.01668482092914859,\n \ \ \"acc_norm\": 0.2173202614379085,\n \"acc_norm_stderr\": 0.01668482092914859\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2545454545454545,\n\ \ \"acc_stderr\": 0.04172343038705383,\n \"acc_norm\": 0.2545454545454545,\n\ \ \"acc_norm_stderr\": 0.04172343038705383\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.031362502409358936,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.031362502409358936\n \ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.29850746268656714,\n\ \ \"acc_stderr\": 0.032357437893550424,\n \"acc_norm\": 0.29850746268656714,\n\ \ \"acc_norm_stderr\": 0.032357437893550424\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.1927710843373494,\n\ \ \"acc_stderr\": 0.030709824050565274,\n \"acc_norm\": 0.1927710843373494,\n\ \ \"acc_norm_stderr\": 0.030709824050565274\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.1695906432748538,\n \"acc_stderr\": 0.028782108105401712,\n\ \ \"acc_norm\": 0.1695906432748538,\n \"acc_norm_stderr\": 0.028782108105401712\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22888616891064872,\n\ \ \"mc1_stderr\": 0.014706994909055027,\n \"mc2\": 0.3713156999785537,\n\ \ \"mc2_stderr\": 0.014209414703476026\n }\n}\n```" repo_url: https://huggingface.co/Devio/test-22B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|arc:challenge|25_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hellaswag|10_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:38:52.675251.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:38:52.675251.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:38:52.675251.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T01:38:52.675251.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T01_38_52.675251 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T01:38:52.675251.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T01:38:52.675251.parquet' - config_name: results data_files: - split: 2023_09_02T01_38_52.675251 path: - results_2023-09-02T01:38:52.675251.parquet - split: latest path: - results_2023-09-02T01:38:52.675251.parquet --- # Dataset Card for Evaluation run of Devio/test-22B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Devio/test-22B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [Devio/test-22B](https://huggingface.co/Devio/test-22B) on the 
[Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Devio__test-22B", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T01:38:52.675251](https://huggingface.co/datasets/open-llm-leaderboard/details_Devio__test-22B/blob/main/results_2023-09-02T01%3A38%3A52.675251.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2758923391191029, "acc_stderr": 0.03201388252201146, "acc_norm": 0.2797296584858208, "acc_norm_stderr": 0.032017183492893266, "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.3713156999785537, "mc2_stderr": 0.014209414703476026 }, "harness|arc:challenge|25": { "acc": 0.3438566552901024, "acc_stderr": 0.01388064457015621, "acc_norm": 0.39419795221843, "acc_norm_stderr": 0.014280522667467325 }, "harness|hellaswag|10": { "acc": 0.4690300736904999, "acc_stderr": 0.004980200451851677, "acc_norm": 0.6450906193985262, "acc_norm_stderr": 0.004775079636567092 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.22962962962962963, "acc_stderr": 0.03633384414073462, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.03633384414073462 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.32894736842105265, "acc_stderr": 0.03823428969926604, "acc_norm": 0.32894736842105265, "acc_norm_stderr": 0.03823428969926604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2981132075471698, "acc_stderr": 0.028152837942493854, "acc_norm": 0.2981132075471698, "acc_norm_stderr": 0.028152837942493854 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.24305555555555555, "acc_stderr": 0.03586879280080341, "acc_norm": 0.24305555555555555, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, 
"acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267438, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267438 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.29411764705882354, "acc_stderr": 0.045338381959297736, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.045338381959297736 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.21, "acc_stderr": 0.04093601807403326, "acc_norm": 0.21, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.28936170212765955, "acc_stderr": 0.02964400657700962, "acc_norm": 0.28936170212765955, "acc_norm_stderr": 0.02964400657700962 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.022860838309232072, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.022860838309232072 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.03764950879790606, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.03764950879790606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.33225806451612905, "acc_stderr": 0.026795560848122797, "acc_norm": 0.33225806451612905, "acc_norm_stderr": 0.026795560848122797 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2955665024630542, "acc_stderr": 0.032104944337514575, "acc_norm": 0.2955665024630542, "acc_norm_stderr": 0.032104944337514575 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139404, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139404 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.35353535353535354, "acc_stderr": 0.03406086723547153, "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.35233160621761656, "acc_stderr": 0.03447478286414359, "acc_norm": 0.35233160621761656, "acc_norm_stderr": 0.03447478286414359 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3564102564102564, "acc_stderr": 0.024283140529467295, "acc_norm": 0.3564102564102564, "acc_norm_stderr": 0.024283140529467295 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.031041941304059288, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.031041941304059288 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242741, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242741 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3486238532110092, "acc_stderr": 0.020431254090714324, "acc_norm": 0.3486238532110092, "acc_norm_stderr": 0.020431254090714324 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3888888888888889, "acc_stderr": 
0.03324708911809117, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.03324708911809117 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25980392156862747, "acc_stderr": 0.03077855467869326, "acc_norm": 0.25980392156862747, "acc_norm_stderr": 0.03077855467869326 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.20253164556962025, "acc_stderr": 0.026160568246601457, "acc_norm": 0.20253164556962025, "acc_norm_stderr": 0.026160568246601457 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.1210762331838565, "acc_stderr": 0.021894174113185737, "acc_norm": 0.1210762331838565, "acc_norm_stderr": 0.021894174113185737 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.32061068702290074, "acc_stderr": 0.04093329229834277, "acc_norm": 0.32061068702290074, "acc_norm_stderr": 0.04093329229834277 }, "harness|hendrycksTest-international_law|5": { "acc": 0.17355371900826447, "acc_stderr": 0.03457272836917671, "acc_norm": 0.17355371900826447, "acc_norm_stderr": 0.03457272836917671 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2222222222222222, "acc_stderr": 0.040191074725573483, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2331288343558282, "acc_stderr": 0.033220157957767414, "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.033220157957767414 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.16964285714285715, "acc_stderr": 0.03562367850095391, "acc_norm": 0.16964285714285715, "acc_norm_stderr": 0.03562367850095391 }, "harness|hendrycksTest-management|5": { "acc": 0.3786407766990291, "acc_stderr": 0.04802694698258972, "acc_norm": 0.3786407766990291, "acc_norm_stderr": 0.04802694698258972 }, "harness|hendrycksTest-marketing|5": { "acc": 0.19658119658119658, "acc_stderr": 0.02603538609895129, "acc_norm": 0.19658119658119658, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.23, 
"acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.21839080459770116, "acc_stderr": 0.0147743583199345, "acc_norm": 0.21839080459770116, "acc_norm_stderr": 0.0147743583199345 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.21676300578034682, "acc_stderr": 0.02218347766841286, "acc_norm": 0.21676300578034682, "acc_norm_stderr": 0.02218347766841286 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24134078212290502, "acc_stderr": 0.014310999547961459, "acc_norm": 0.24134078212290502, "acc_norm_stderr": 0.014310999547961459 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.29411764705882354, "acc_stderr": 0.026090162504279053, "acc_norm": 0.29411764705882354, "acc_norm_stderr": 0.026090162504279053 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2604501607717042, "acc_stderr": 0.02492672322484554, "acc_norm": 0.2604501607717042, "acc_norm_stderr": 0.02492672322484554 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.22839506172839505, "acc_stderr": 0.023358211840626267, "acc_norm": 0.22839506172839505, "acc_norm_stderr": 0.023358211840626267 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590624, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590624 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24511082138200782, "acc_stderr": 0.010986307870045517, "acc_norm": 0.24511082138200782, "acc_norm_stderr": 0.010986307870045517 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2173202614379085, "acc_stderr": 0.01668482092914859, "acc_norm": 0.2173202614379085, "acc_norm_stderr": 0.01668482092914859 }, "harness|hendrycksTest-public_relations|5": { "acc": 
0.2545454545454545, "acc_stderr": 0.04172343038705383, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.4, "acc_stderr": 0.031362502409358936, "acc_norm": 0.4, "acc_norm_stderr": 0.031362502409358936 }, "harness|hendrycksTest-sociology|5": { "acc": 0.29850746268656714, "acc_stderr": 0.032357437893550424, "acc_norm": 0.29850746268656714, "acc_norm_stderr": 0.032357437893550424 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768078, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-virology|5": { "acc": 0.1927710843373494, "acc_stderr": 0.030709824050565274, "acc_norm": 0.1927710843373494, "acc_norm_stderr": 0.030709824050565274 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.1695906432748538, "acc_stderr": 0.028782108105401712, "acc_norm": 0.1695906432748538, "acc_norm_stderr": 0.028782108105401712 }, "harness|truthfulqa:mc|0": { "mc1": 0.22888616891064872, "mc1_stderr": 0.014706994909055027, "mc2": 0.3713156999785537, "mc2_stderr": 0.014209414703476026 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
alifaER/reverso
2023-09-02T01:54:30.000Z
[ "region:us" ]
alifaER
null
null
null
0
0
Entry not found
satic69/vezi-123
2023-09-02T02:07:07.000Z
[ "region:us" ]
satic69
null
null
null
0
0
Entry not found
Devbdtec/test
2023-09-02T01:59:12.000Z
[ "region:us" ]
Devbdtec
null
null
null
0
0
Entry not found
open-llm-leaderboard/details_venkycs__llama-v2-7b-32kC-Security
2023-09-16T22:13:05.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of venkycs/llama-v2-7b-32kC-Security dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [venkycs/llama-v2-7b-32kC-Security](https://huggingface.co/venkycs/llama-v2-7b-32kC-Security)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_venkycs__llama-v2-7b-32kC-Security\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-09-16T22:12:53.343595](https://huggingface.co/datasets/open-llm-leaderboard/details_venkycs__llama-v2-7b-32kC-Security/blob/main/results_2023-09-16T22-12-53.343595.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0014681208053691276,\n\ \ \"em_stderr\": 0.00039210421902984526,\n \"f1\": 0.05811556208053706,\n\ \ \"f1_stderr\": 0.0013703157651110908,\n \"acc\": 0.37805421700805364,\n\ \ \"acc_stderr\": 0.00898232300653474\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0014681208053691276,\n \"em_stderr\": 0.00039210421902984526,\n\ \ \"f1\": 0.05811556208053706,\n \"f1_stderr\": 0.0013703157651110908\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.03866565579984837,\n \ \ \"acc_stderr\": 0.005310583162098087\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7174427782162589,\n \"acc_stderr\": 0.012654062850971393\n\ \ }\n}\n```" repo_url: https://huggingface.co/venkycs/llama-v2-7b-32kC-Security leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|arc:challenge|25_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T02:09:05.697790.parquet' - config_name: harness_drop_3 data_files: - split: 2023_09_16T22_12_53.343595 path: - '**/details_harness|drop|3_2023-09-16T22-12-53.343595.parquet' - split: latest path: - '**/details_harness|drop|3_2023-09-16T22-12-53.343595.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_09_16T22_12_53.343595 path: - '**/details_harness|gsm8k|5_2023-09-16T22-12-53.343595.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-09-16T22-12-53.343595.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hellaswag|10_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T02:09:05.697790.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T02:09:05.697790.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T02:09:05.697790.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T02:09:05.697790.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T02:09:05.697790.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T02:09:05.697790.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T02:09:05.697790.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T02:09:05.697790.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T02_09_05.697790 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T02:09:05.697790.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T02:09:05.697790.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_09_16T22_12_53.343595 path: - '**/details_harness|winogrande|5_2023-09-16T22-12-53.343595.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-09-16T22-12-53.343595.parquet' - config_name: results data_files: - split: 2023_09_02T02_09_05.697790 path: - results_2023-09-02T02:09:05.697790.parquet - split: 2023_09_16T22_12_53.343595 path: - results_2023-09-16T22-12-53.343595.parquet - split: latest path: - results_2023-09-16T22-12-53.343595.parquet --- # Dataset Card for Evaluation run of venkycs/llama-v2-7b-32kC-Security ## Dataset Description - 
**Homepage:** - **Repository:** https://huggingface.co/venkycs/llama-v2-7b-32kC-Security - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [venkycs/llama-v2-7b-32kC-Security](https://huggingface.co/venkycs/llama-v2-7b-32kC-Security) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_venkycs__llama-v2-7b-32kC-Security", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-16T22:12:53.343595](https://huggingface.co/datasets/open-llm-leaderboard/details_venkycs__llama-v2-7b-32kC-Security/blob/main/results_2023-09-16T22-12-53.343595.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0014681208053691276, "em_stderr": 0.00039210421902984526, "f1": 0.05811556208053706, "f1_stderr": 0.0013703157651110908, "acc": 0.37805421700805364, "acc_stderr": 0.00898232300653474 }, "harness|drop|3": { "em": 0.0014681208053691276, "em_stderr": 0.00039210421902984526, "f1": 0.05811556208053706, "f1_stderr": 0.0013703157651110908 }, "harness|gsm8k|5": { "acc": 0.03866565579984837, "acc_stderr": 0.005310583162098087 }, "harness|winogrande|5": { "acc": 0.7174427782162589, "acc_stderr": 0.012654062850971393 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
MJM666/7772
2023-09-02T04:08:45.000Z
[ "region:us" ]
MJM666
null
null
null
0
0
Entry not found
jordonpeter01/fuego-20230902-041357-9d81ce
2023-09-02T05:10:47.000Z
[ "fuego", "region:us" ]
jordonpeter01
null
null
null
0
0
--- tags: - fuego fuego: id: 20230902-041357-9d81ce status: done script: run_glue.py requirements_file: requirements.txt space_id: jordonpeter01/fuego-20230902-041357-9d81ce space_hardware: cpu-basic github_repo_id: huggingface/transformers github_repo_branch: main github_repo_sha: 0afa5071bd84e44301750fdc594e33db102cf374 ---
KhalfounMehdi/mura_dataset_processed
2023-09-02T02:14:31.000Z
[ "region:us" ]
KhalfounMehdi
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': abnormal '1': normal splits: - name: train num_bytes: 235536715.375 num_examples: 40005 download_size: 238614754 dataset_size: 235536715.375 --- # Dataset Card for "mura_dataset_processed" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
bocksnum/garnttrain
2023-09-02T02:18:13.000Z
[ "region:us" ]
bocksnum
null
null
null
0
0
Entry not found
Thouph/aesthetic_score
2023-09-16T20:55:03.000Z
[ "license:cc-by-4.0", "region:us" ]
Thouph
null
null
null
0
0
--- license: cc-by-4.0 ---
jordonpeter01/fuego-20230902-041902-c6d36e
2023-09-02T03:53:20.000Z
[ "fuego", "region:us" ]
jordonpeter01
null
null
null
0
0
--- tags: - fuego fuego: id: 20230902-041902-c6d36e status: done script: run_glue.py requirements_file: requirements.txt space_id: jordonpeter01/fuego-20230902-041902-c6d36e space_hardware: cpu-basic github_repo_id: huggingface/transformers github_repo_branch: main github_repo_sha: 0afa5071bd84e44301750fdc594e33db102cf374 ---
KhalfounMehdi/mura_dataset_processed_224px
2023-09-02T02:37:18.000Z
[ "region:us" ]
KhalfounMehdi
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': abnormal '1': normal splits: - name: train num_bytes: 997379264.375 num_examples: 40005 download_size: 997532653 dataset_size: 997379264.375 --- # Dataset Card for "mura_dataset_processed_224px" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KhalfounMehdi/mura_dataset_processed_224px_split
2023-09-02T02:39:42.000Z
[ "region:us" ]
KhalfounMehdi
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': abnormal '1': normal splits: - name: train num_bytes: 897597368.7549056 num_examples: 36004 - name: test num_bytes: 99746891.24509436 num_examples: 4001 download_size: 997622999 dataset_size: 997344260.0 --- # Dataset Card for "mura_dataset_processed_224px_split" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
KhalfounMehdi/mura_dataset_processed_224px_train_val
2023-09-02T03:30:24.000Z
[ "region:us" ]
KhalfounMehdi
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* - split: validation path: data/validation-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': abnormal '1': normal splits: - name: train num_bytes: 897597369.6548932 num_examples: 36004 - name: test num_bytes: 99746891.34510686 num_examples: 4001 - name: validation num_bytes: 99746891.34510686 num_examples: 4001 download_size: 1097353191 dataset_size: 1097091152.3451068 --- # Dataset Card for "mura_dataset_processed_224px_train_val" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
open-llm-leaderboard/details_NobodyExistsOnTheInternet__GiftedConvo13bLoraNoEcons
2023-09-02T03:31:26.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons](https://huggingface.co/NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NobodyExistsOnTheInternet__GiftedConvo13bLoraNoEcons\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T03:30:04.510392](https://huggingface.co/datasets/open-llm-leaderboard/details_NobodyExistsOnTheInternet__GiftedConvo13bLoraNoEcons/blob/main/results_2023-09-02T03%3A30%3A04.510392.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5529622895675308,\n\ \ \"acc_stderr\": 0.0344246880991781,\n \"acc_norm\": 0.5569805116637543,\n\ \ \"acc_norm_stderr\": 0.0344035382161514,\n \"mc1\": 0.2717258261933905,\n\ \ \"mc1_stderr\": 0.015572840452875833,\n \"mc2\": 0.40555969573922585,\n\ \ \"mc2_stderr\": 0.014178083758751547\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5537542662116041,\n \"acc_stderr\": 0.014526705548539982,\n\ \ \"acc_norm\": 0.5938566552901023,\n \"acc_norm_stderr\": 0.014351656690097862\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6349332802230632,\n\ \ \"acc_stderr\": 0.004804649197163696,\n \"acc_norm\": 0.8319059948217487,\n\ \ \"acc_norm_stderr\": 0.003731854957030938\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5111111111111111,\n\ \ \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.5111111111111111,\n\ \ \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249034,\n\ \ \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249034\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n\ \ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n\ \ \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5763888888888888,\n\ \ \"acc_stderr\": 0.041321250197233685,\n \"acc_norm\": 0.5763888888888888,\n\ \ \"acc_norm_stderr\": 0.041321250197233685\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5260115606936416,\n\ \ \"acc_stderr\": 0.03807301726504511,\n \"acc_norm\": 0.5260115606936416,\n\ \ \"acc_norm_stderr\": 0.03807301726504511\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237655,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\ \ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n\ \ \"acc_stderr\": 0.04404556157374768,\n \"acc_norm\": 0.32456140350877194,\n\ \ \"acc_norm_stderr\": 0.04404556157374768\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728762,\n\ \ \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728762\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.31216931216931215,\n \"acc_stderr\": 0.023865206836972602,\n \"\ acc_norm\": 0.31216931216931215,\n 
\"acc_norm_stderr\": 0.023865206836972602\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.043062412591271526,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.043062412591271526\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.6387096774193548,\n \"acc_stderr\": 0.027327548447957532,\n \"\ acc_norm\": 0.6387096774193548,\n \"acc_norm_stderr\": 0.027327548447957532\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.43842364532019706,\n \"acc_stderr\": 0.03491207857486518,\n \"\ acc_norm\": 0.43842364532019706,\n \"acc_norm_stderr\": 0.03491207857486518\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.56,\n \"acc_stderr\": 0.049888765156985884,\n \"acc_norm\"\ : 0.56,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6848484848484848,\n \"acc_stderr\": 0.036277305750224094,\n\ \ \"acc_norm\": 0.6848484848484848,\n \"acc_norm_stderr\": 0.036277305750224094\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6717171717171717,\n \"acc_stderr\": 0.03345678422756775,\n \"\ acc_norm\": 0.6717171717171717,\n \"acc_norm_stderr\": 0.03345678422756775\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8134715025906736,\n \"acc_stderr\": 0.02811209121011747,\n\ \ \"acc_norm\": 0.8134715025906736,\n \"acc_norm_stderr\": 0.02811209121011747\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4948717948717949,\n \"acc_stderr\": 0.025349672906838653,\n\ \ \"acc_norm\": 0.4948717948717949,\n \"acc_norm_stderr\": 0.025349672906838653\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683515,\n \ \ \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683515\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n\ \ \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.31788079470198677,\n \"acc_stderr\": 0.03802039760107903,\n \"\ acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.03802039760107903\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7394495412844037,\n \"acc_stderr\": 0.01881918203485007,\n \"\ acc_norm\": 0.7394495412844037,\n \"acc_norm_stderr\": 0.01881918203485007\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4537037037037037,\n \"acc_stderr\": 0.03395322726375797,\n \"\ acc_norm\": 0.4537037037037037,\n \"acc_norm_stderr\": 0.03395322726375797\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7450980392156863,\n \"acc_stderr\": 0.030587591351604246,\n \"\ acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.030587591351604246\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \ \ \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n\ \ \"acc_stderr\": 0.03210062154134986,\n \"acc_norm\": 0.6457399103139013,\n\ \ \"acc_norm_stderr\": 0.03210062154134986\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n\ \ \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7355371900826446,\n \"acc_stderr\": 0.04026187527591207,\n \"\ acc_norm\": 0.7355371900826446,\n \"acc_norm_stderr\": 0.04026187527591207\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n\ \ \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n\ \ \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6625766871165644,\n \"acc_stderr\": 0.037149084099355745,\n\ \ \"acc_norm\": 0.6625766871165644,\n \"acc_norm_stderr\": 0.037149084099355745\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\ \ \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n\ \ \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\ \ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7905982905982906,\n\ \ \"acc_stderr\": 0.026655699653922747,\n \"acc_norm\": 0.7905982905982906,\n\ \ \"acc_norm_stderr\": 0.026655699653922747\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7535121328224776,\n\ \ \"acc_stderr\": 0.015411308769686934,\n \"acc_norm\": 0.7535121328224776,\n\ \ \"acc_norm_stderr\": 0.015411308769686934\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.025906632631016127,\n\ \ \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.025906632631016127\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.26033519553072626,\n\ \ \"acc_stderr\": 0.014676252009319478,\n 
\"acc_norm\": 0.26033519553072626,\n\ \ \"acc_norm_stderr\": 0.014676252009319478\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6241830065359477,\n \"acc_stderr\": 0.027732834353363947,\n\ \ \"acc_norm\": 0.6241830065359477,\n \"acc_norm_stderr\": 0.027732834353363947\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n\ \ \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n\ \ \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100793,\n\ \ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100793\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3971631205673759,\n \"acc_stderr\": 0.029189805673587088,\n \ \ \"acc_norm\": 0.3971631205673759,\n \"acc_norm_stderr\": 0.029189805673587088\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43285528031290743,\n\ \ \"acc_stderr\": 0.012654565234622864,\n \"acc_norm\": 0.43285528031290743,\n\ \ \"acc_norm_stderr\": 0.012654565234622864\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5073529411764706,\n \"acc_stderr\": 0.030369552523902173,\n\ \ \"acc_norm\": 0.5073529411764706,\n \"acc_norm_stderr\": 0.030369552523902173\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5751633986928104,\n \"acc_stderr\": 0.01999797303545833,\n \ \ \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.01999797303545833\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6122448979591837,\n \"acc_stderr\": 0.031192230726795656,\n\ \ \"acc_norm\": 0.6122448979591837,\n \"acc_norm_stderr\": 0.031192230726795656\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7213930348258707,\n\ \ \"acc_stderr\": 0.031700561834973086,\n \"acc_norm\": 0.7213930348258707,\n\ \ \"acc_norm_stderr\": 0.031700561834973086\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.82,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n\ \ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.4578313253012048,\n\ \ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.031581495393387324,\n\ \ \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.031581495393387324\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2717258261933905,\n\ \ \"mc1_stderr\": 0.015572840452875833,\n \"mc2\": 0.40555969573922585,\n\ \ \"mc2_stderr\": 0.014178083758751547\n }\n}\n```" repo_url: https://huggingface.co/NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|arc:challenge|25_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hellaswag|10_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:30:04.510392.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:30:04.510392.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:30:04.510392.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:30:04.510392.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T03_30_04.510392 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T03:30:04.510392.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T03:30:04.510392.parquet' - config_name: results data_files: - split: 2023_09_02T03_30_04.510392 path: - results_2023-09-02T03:30:04.510392.parquet - split: latest path: - results_2023-09-02T03:30:04.510392.parquet --- # Dataset Card for Evaluation run of NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of 
model [NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons](https://huggingface.co/NobodyExistsOnTheInternet/GiftedConvo13bLoraNoEcons) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NobodyExistsOnTheInternet__GiftedConvo13bLoraNoEcons", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T03:30:04.510392](https://huggingface.co/datasets/open-llm-leaderboard/details_NobodyExistsOnTheInternet__GiftedConvo13bLoraNoEcons/blob/main/results_2023-09-02T03%3A30%3A04.510392.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5529622895675308, "acc_stderr": 0.0344246880991781, "acc_norm": 0.5569805116637543, "acc_norm_stderr": 0.0344035382161514, "mc1": 0.2717258261933905, "mc1_stderr": 0.015572840452875833, "mc2": 0.40555969573922585, "mc2_stderr": 0.014178083758751547 }, "harness|arc:challenge|25": { "acc": 0.5537542662116041, "acc_stderr": 0.014526705548539982, "acc_norm": 0.5938566552901023, "acc_norm_stderr": 0.014351656690097862 }, "harness|hellaswag|10": { "acc": 0.6349332802230632, "acc_stderr": 0.004804649197163696, "acc_norm": 0.8319059948217487, "acc_norm_stderr": 0.003731854957030938 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5111111111111111, "acc_stderr": 0.04318275491977976, "acc_norm": 0.5111111111111111, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249034, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5763888888888888, "acc_stderr": 0.041321250197233685, "acc_norm": 0.5763888888888888, "acc_norm_stderr": 0.041321250197233685 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, 
"acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5260115606936416, "acc_stderr": 0.03807301726504511, "acc_norm": 0.5260115606936416, "acc_norm_stderr": 0.03807301726504511 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728762, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728762 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.31216931216931215, "acc_stderr": 0.023865206836972602, "acc_norm": 0.31216931216931215, "acc_norm_stderr": 0.023865206836972602 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.043062412591271526, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.043062412591271526 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6387096774193548, "acc_stderr": 0.027327548447957532, "acc_norm": 0.6387096774193548, "acc_norm_stderr": 0.027327548447957532 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43842364532019706, "acc_stderr": 0.03491207857486518, "acc_norm": 0.43842364532019706, "acc_norm_stderr": 0.03491207857486518 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.56, "acc_stderr": 0.049888765156985884, "acc_norm": 0.56, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6848484848484848, "acc_stderr": 0.036277305750224094, "acc_norm": 0.6848484848484848, "acc_norm_stderr": 0.036277305750224094 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6717171717171717, "acc_stderr": 0.03345678422756775, "acc_norm": 0.6717171717171717, "acc_norm_stderr": 0.03345678422756775 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8134715025906736, "acc_stderr": 0.02811209121011747, "acc_norm": 0.8134715025906736, "acc_norm_stderr": 0.02811209121011747 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4948717948717949, "acc_stderr": 0.025349672906838653, "acc_norm": 0.4948717948717949, "acc_norm_stderr": 0.025349672906838653 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683515, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683515 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.03802039760107903, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.03802039760107903 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7394495412844037, "acc_stderr": 0.01881918203485007, "acc_norm": 0.7394495412844037, "acc_norm_stderr": 0.01881918203485007 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4537037037037037, "acc_stderr": 
0.03395322726375797, "acc_norm": 0.4537037037037037, "acc_norm_stderr": 0.03395322726375797 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7450980392156863, "acc_stderr": 0.030587591351604246, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.03210062154134986, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.03210062154134986 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7355371900826446, "acc_stderr": 0.04026187527591207, "acc_norm": 0.7355371900826446, "acc_norm_stderr": 0.04026187527591207 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6625766871165644, "acc_stderr": 0.037149084099355745, "acc_norm": 0.6625766871165644, "acc_norm_stderr": 0.037149084099355745 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7905982905982906, "acc_stderr": 0.026655699653922747, "acc_norm": 0.7905982905982906, "acc_norm_stderr": 0.026655699653922747 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 
0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7535121328224776, "acc_stderr": 0.015411308769686934, "acc_norm": 0.7535121328224776, "acc_norm_stderr": 0.015411308769686934 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6358381502890174, "acc_stderr": 0.025906632631016127, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.025906632631016127 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.26033519553072626, "acc_stderr": 0.014676252009319478, "acc_norm": 0.26033519553072626, "acc_norm_stderr": 0.014676252009319478 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6241830065359477, "acc_stderr": 0.027732834353363947, "acc_norm": 0.6241830065359477, "acc_norm_stderr": 0.027732834353363947 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6527331189710611, "acc_stderr": 0.027040745502307336, "acc_norm": 0.6527331189710611, "acc_norm_stderr": 0.027040745502307336 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.026725868809100793, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.026725868809100793 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3971631205673759, "acc_stderr": 0.029189805673587088, "acc_norm": 0.3971631205673759, "acc_norm_stderr": 0.029189805673587088 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43285528031290743, "acc_stderr": 0.012654565234622864, "acc_norm": 0.43285528031290743, "acc_norm_stderr": 0.012654565234622864 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5073529411764706, "acc_stderr": 0.030369552523902173, "acc_norm": 0.5073529411764706, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5751633986928104, "acc_stderr": 0.01999797303545833, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.01999797303545833 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 
0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6122448979591837, "acc_stderr": 0.031192230726795656, "acc_norm": 0.6122448979591837, "acc_norm_stderr": 0.031192230726795656 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7213930348258707, "acc_stderr": 0.031700561834973086, "acc_norm": 0.7213930348258707, "acc_norm_stderr": 0.031700561834973086 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.038612291966536934, "acc_norm": 0.82, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.0387862677100236, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.031581495393387324, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.031581495393387324 }, "harness|truthfulqa:mc|0": { "mc1": 0.2717258261933905, "mc1_stderr": 0.015572840452875833, "mc2": 0.40555969573922585, "mc2_stderr": 0.014178083758751547 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_uukuguy__speechless-llama2-13b
2023-09-12T13:58:24.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of uukuguy/speechless-llama2-13b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [uukuguy/speechless-llama2-13b](https://huggingface.co/uukuguy/speechless-llama2-13b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 3 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-llama2-13b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-12T13:57:07.476950](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-llama2-13b/blob/main/results_2023-09-12T13-57-07.476950.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5869376489115714,\n\ \ \"acc_stderr\": 0.0340383920445088,\n \"acc_norm\": 0.5910399944637348,\n\ \ \"acc_norm_stderr\": 0.03401701193254301,\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5562491990096062,\n\ \ \"mc2_stderr\": 0.01544713306521873\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5776450511945392,\n \"acc_stderr\": 0.014434138713379983,\n\ \ \"acc_norm\": 0.6220136518771331,\n \"acc_norm_stderr\": 0.0141696645203031\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6210914160525791,\n\ \ \"acc_stderr\": 0.004841238763529372,\n \"acc_norm\": 0.81876120294762,\n\ \ \"acc_norm_stderr\": 0.003844286350624635\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n\ \ \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n\ \ \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296563,\n\ \ \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296563\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n\ \ \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \ \ \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.630188679245283,\n \"acc_stderr\": 0.02971142188010793,\n\ \ \"acc_norm\": 0.630188679245283,\n \"acc_norm_stderr\": 0.02971142188010793\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n\ \ \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n\ \ \"acc_norm_stderr\": 0.039994111357535424\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \ \ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n\ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n\ \ \"acc_stderr\": 0.03789401760283647,\n \"acc_norm\": 0.5549132947976878,\n\ \ \"acc_norm_stderr\": 0.03789401760283647\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.04655010411319616,\n\ \ \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.04655010411319616\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4978723404255319,\n \"acc_stderr\": 0.03268572658667492,\n\ \ \"acc_norm\": 0.4978723404255319,\n \"acc_norm_stderr\": 0.03268572658667492\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n\ \ \"acc_stderr\": 0.04185774424022056,\n \"acc_norm\": 0.2719298245614035,\n\ \ \"acc_norm_stderr\": 0.04185774424022056\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192118,\n\ \ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192118\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.373015873015873,\n \"acc_stderr\": 0.02490699045899257,\n \"acc_norm\"\ : 0.373015873015873,\n \"acc_norm_stderr\": 
0.02490699045899257\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.04306241259127152,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.04306241259127152\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6903225806451613,\n\ \ \"acc_stderr\": 0.026302774983517414,\n \"acc_norm\": 0.6903225806451613,\n\ \ \"acc_norm_stderr\": 0.026302774983517414\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n\ \ \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\"\ : 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.03567969772268049,\n\ \ \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.03567969772268049\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7575757575757576,\n \"acc_stderr\": 0.030532892233932026,\n \"\ acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.030532892233932026\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.02614848346915331,\n\ \ \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.02614848346915331\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6256410256410256,\n \"acc_stderr\": 0.0245375915728305,\n \ \ \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.0245375915728305\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ 
acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606648,\n \ \ \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606648\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6008403361344538,\n \"acc_stderr\": 0.03181110032413926,\n \ \ \"acc_norm\": 0.6008403361344538,\n \"acc_norm_stderr\": 0.03181110032413926\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"\ acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7944954128440367,\n \"acc_stderr\": 0.017324352325016012,\n \"\ acc_norm\": 0.7944954128440367,\n \"acc_norm_stderr\": 0.017324352325016012\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"\ acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7990196078431373,\n \"acc_stderr\": 0.02812597226565438,\n \"\ acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.02812597226565438\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.759493670886076,\n \"acc_stderr\": 0.027820781981149685,\n \ \ \"acc_norm\": 0.759493670886076,\n \"acc_norm_stderr\": 0.027820781981149685\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n\ \ \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n\ \ \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.648854961832061,\n \"acc_stderr\": 0.04186445163013751,\n\ \ \"acc_norm\": 0.648854961832061,\n \"acc_norm_stderr\": 0.04186445163013751\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7107438016528925,\n \"acc_stderr\": 
0.04139112727635463,\n \"\ acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n\ \ \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n\ \ \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n\ \ \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\ \ \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n\ \ \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n\ \ \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8247863247863247,\n\ \ \"acc_stderr\": 0.02490443909891823,\n \"acc_norm\": 0.8247863247863247,\n\ \ \"acc_norm_stderr\": 0.02490443909891823\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \ \ \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7637292464878672,\n\ \ \"acc_stderr\": 0.015190473717037495,\n \"acc_norm\": 0.7637292464878672,\n\ \ \"acc_norm_stderr\": 0.015190473717037495\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.025722802200895803,\n\ \ \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.025722802200895803\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.423463687150838,\n\ \ \"acc_stderr\": 0.016525425898773514,\n \"acc_norm\": 0.423463687150838,\n\ \ \"acc_norm_stderr\": 0.016525425898773514\n },\n 
\"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6601307189542484,\n \"acc_stderr\": 0.027121956071388856,\n\ \ \"acc_norm\": 0.6601307189542484,\n \"acc_norm_stderr\": 0.027121956071388856\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6559485530546624,\n\ \ \"acc_stderr\": 0.026981478043648043,\n \"acc_norm\": 0.6559485530546624,\n\ \ \"acc_norm_stderr\": 0.026981478043648043\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6512345679012346,\n \"acc_stderr\": 0.026517597724465013,\n\ \ \"acc_norm\": 0.6512345679012346,\n \"acc_norm_stderr\": 0.026517597724465013\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4645390070921986,\n \"acc_stderr\": 0.029752389657427047,\n \ \ \"acc_norm\": 0.4645390070921986,\n \"acc_norm_stderr\": 0.029752389657427047\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.438722294654498,\n\ \ \"acc_stderr\": 0.012673969883493272,\n \"acc_norm\": 0.438722294654498,\n\ \ \"acc_norm_stderr\": 0.012673969883493272\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5992647058823529,\n \"acc_stderr\": 0.029768263528933105,\n\ \ \"acc_norm\": 0.5992647058823529,\n \"acc_norm_stderr\": 0.029768263528933105\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.565359477124183,\n \"acc_stderr\": 0.020054269200726463,\n \ \ \"acc_norm\": 0.565359477124183,\n \"acc_norm_stderr\": 0.020054269200726463\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n\ \ \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n\ \ \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.673469387755102,\n \"acc_stderr\": 0.03002105623844031,\n\ \ \"acc_norm\": 0.673469387755102,\n \"acc_norm_stderr\": 0.03002105623844031\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7562189054726368,\n\ \ 
\"acc_stderr\": 0.030360490154014645,\n \"acc_norm\": 0.7562189054726368,\n\ \ \"acc_norm_stderr\": 0.030360490154014645\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4759036144578313,\n\ \ \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.4759036144578313,\n\ \ \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.03218093795602357,\n\ \ \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.03218093795602357\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5562491990096062,\n\ \ \"mc2_stderr\": 0.01544713306521873\n }\n}\n```" repo_url: https://huggingface.co/uukuguy/speechless-llama2-13b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|arc:challenge|25_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|arc:challenge|25_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|arc:challenge|25_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hellaswag|10_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hellaswag|10_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hellaswag|10_2023-09-12T13-57-07.476950.parquet' - split: 
latest path: - '**/details_harness|hellaswag|10_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:45:23.206143.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:45:23.206143.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:45:23.206143.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T15:58:18.299905.parquet' - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T15:58:18.299905.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T15:58:18.299905.parquet' - 
'**/details_harness|hendrycksTest-prehistory|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T15:58:18.299905.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T13-57-07.476950.parquet' - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T13-57-07.476950.parquet' - 
'**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T13-57-07.476950.parquet' - 
'**/details_harness|hendrycksTest-security_studies|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T13-57-07.476950.parquet' - 
'**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T13-57-07.476950.parquet' - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-12T13-57-07.476950.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-12T13-57-07.476950.parquet' - config_name: 
harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T03:45:23.206143.parquet' - split: 
2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T13-57-07.476950.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - 
'**/details_harness|hendrycksTest-global_facts|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - 
'**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-human_aging|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - 
'**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-management|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - 
'**/details_harness|hendrycksTest-marketing|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 
2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - 
'**/details_harness|hendrycksTest-prehistory|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-12T13-57-07.476950.parquet' - 
config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 
path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-12T13-57-07.476950.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-world_religions|5_2023-09-12T13-57-07.476950.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T03_45_23.206143 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T03:45:23.206143.parquet' - split: 2023_09_02T15_58_18.299905 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T15:58:18.299905.parquet' - split: 2023_09_12T13_57_07.476950 path: - '**/details_harness|truthfulqa:mc|0_2023-09-12T13-57-07.476950.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-12T13-57-07.476950.parquet' - config_name: results data_files: - split: 2023_09_02T03_45_23.206143 path: - results_2023-09-02T03:45:23.206143.parquet - split: 2023_09_02T15_58_18.299905 path: - results_2023-09-02T15:58:18.299905.parquet - split: 2023_09_12T13_57_07.476950 path: - results_2023-09-12T13-57-07.476950.parquet - split: latest path: - results_2023-09-12T13-57-07.476950.parquet --- # Dataset Card for Evaluation run of uukuguy/speechless-llama2-13b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/uukuguy/speechless-llama2-13b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [uukuguy/speechless-llama2-13b](https://huggingface.co/uukuguy/speechless-llama2-13b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 3 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. 
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-llama2-13b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-12T13:57:07.476950](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-llama2-13b/blob/main/results_2023-09-12T13-57-07.476950.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5869376489115714, "acc_stderr": 0.0340383920445088, "acc_norm": 0.5910399944637348, "acc_norm_stderr": 0.03401701193254301, "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5562491990096062, "mc2_stderr": 0.01544713306521873 }, "harness|arc:challenge|25": { "acc": 0.5776450511945392, "acc_stderr": 0.014434138713379983, "acc_norm": 0.6220136518771331, "acc_norm_stderr": 0.0141696645203031 }, "harness|hellaswag|10": { "acc": 0.6210914160525791, "acc_stderr": 0.004841238763529372, "acc_norm": 0.81876120294762, "acc_norm_stderr": 0.003844286350624635 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296563, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296563 }, 
"harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.630188679245283, "acc_stderr": 0.02971142188010793, "acc_norm": 0.630188679245283, "acc_norm_stderr": 0.02971142188010793 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6458333333333334, "acc_stderr": 0.039994111357535424, "acc_norm": 0.6458333333333334, "acc_norm_stderr": 0.039994111357535424 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283647, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283647 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.04655010411319616, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.04655010411319616 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4978723404255319, "acc_stderr": 0.03268572658667492, "acc_norm": 0.4978723404255319, "acc_norm_stderr": 0.03268572658667492 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022056, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022056 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192118, 
"acc_norm": 0.5448275862068965, "acc_norm_stderr": 0.04149886942192118 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.373015873015873, "acc_stderr": 0.02490699045899257, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.02490699045899257 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127152, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127152 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6903225806451613, "acc_stderr": 0.026302774983517414, "acc_norm": 0.6903225806451613, "acc_norm_stderr": 0.026302774983517414 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.03567969772268049, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.03567969772268049 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7575757575757576, "acc_stderr": 0.030532892233932026, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.030532892233932026 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.02614848346915331, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.02614848346915331 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6256410256410256, "acc_stderr": 0.0245375915728305, "acc_norm": 0.6256410256410256, "acc_norm_stderr": 0.0245375915728305 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 
0.02831753349606648, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606648 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6008403361344538, "acc_stderr": 0.03181110032413926, "acc_norm": 0.6008403361344538, "acc_norm_stderr": 0.03181110032413926 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7944954128440367, "acc_stderr": 0.017324352325016012, "acc_norm": 0.7944954128440367, "acc_norm_stderr": 0.017324352325016012 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.02812597226565438, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.02812597226565438 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149685, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149685 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.648854961832061, "acc_stderr": 0.04186445163013751, "acc_norm": 0.648854961832061, "acc_norm_stderr": 0.04186445163013751 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.04139112727635463, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.04139112727635463 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { 
"acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8247863247863247, "acc_stderr": 0.02490443909891823, "acc_norm": 0.8247863247863247, "acc_norm_stderr": 0.02490443909891823 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7637292464878672, "acc_stderr": 0.015190473717037495, "acc_norm": 0.7637292464878672, "acc_norm_stderr": 0.015190473717037495 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6473988439306358, "acc_stderr": 0.025722802200895803, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.025722802200895803 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.423463687150838, "acc_stderr": 0.016525425898773514, "acc_norm": 0.423463687150838, "acc_norm_stderr": 0.016525425898773514 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6601307189542484, "acc_stderr": 0.027121956071388856, "acc_norm": 0.6601307189542484, "acc_norm_stderr": 0.027121956071388856 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6559485530546624, "acc_stderr": 0.026981478043648043, "acc_norm": 0.6559485530546624, "acc_norm_stderr": 0.026981478043648043 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6512345679012346, "acc_stderr": 0.026517597724465013, "acc_norm": 0.6512345679012346, "acc_norm_stderr": 0.026517597724465013 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4645390070921986, "acc_stderr": 
0.029752389657427047, "acc_norm": 0.4645390070921986, "acc_norm_stderr": 0.029752389657427047 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.438722294654498, "acc_stderr": 0.012673969883493272, "acc_norm": 0.438722294654498, "acc_norm_stderr": 0.012673969883493272 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5992647058823529, "acc_stderr": 0.029768263528933105, "acc_norm": 0.5992647058823529, "acc_norm_stderr": 0.029768263528933105 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.565359477124183, "acc_stderr": 0.020054269200726463, "acc_norm": 0.565359477124183, "acc_norm_stderr": 0.020054269200726463 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.673469387755102, "acc_stderr": 0.03002105623844031, "acc_norm": 0.673469387755102, "acc_norm_stderr": 0.03002105623844031 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7562189054726368, "acc_stderr": 0.030360490154014645, "acc_norm": 0.7562189054726368, "acc_norm_stderr": 0.030360490154014645 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.4759036144578313, "acc_stderr": 0.03887971849597264, "acc_norm": 0.4759036144578313, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.03218093795602357, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.03218093795602357 }, "harness|truthfulqa:mc|0": { "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5562491990096062, "mc2_stderr": 0.01544713306521873 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data 
Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj
2023-09-02T04:15:20.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj](https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T04:13:58.324897](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj/blob/main/results_2023-09-02T04%3A13%3A58.324897.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5565028631304673,\n\ \ \"acc_stderr\": 0.03440865789809741,\n \"acc_norm\": 0.5605230783595532,\n\ \ \"acc_norm_stderr\": 0.03438904167387677,\n \"mc1\": 0.26560587515299877,\n\ \ \"mc1_stderr\": 0.015461027627253597,\n \"mc2\": 0.3918689435636596,\n\ \ \"mc2_stderr\": 0.013891603262173636\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5443686006825939,\n \"acc_stderr\": 0.014553749939306866,\n\ \ \"acc_norm\": 0.5742320819112628,\n \"acc_norm_stderr\": 0.014449464278868809\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6169089822744473,\n\ \ \"acc_stderr\": 0.004851466623601451,\n \"acc_norm\": 0.8242381995618403,\n\ \ \"acc_norm_stderr\": 0.0037983950550215377\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4962962962962963,\n\ \ \"acc_stderr\": 0.04319223625811331,\n \"acc_norm\": 0.4962962962962963,\n\ \ \"acc_norm_stderr\": 0.04319223625811331\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249034,\n\ \ \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249034\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n\ \ \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \ \ \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5962264150943396,\n \"acc_stderr\": 0.03019761160019795,\n\ \ \"acc_norm\": 0.5962264150943396,\n \"acc_norm_stderr\": 0.03019761160019795\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n\ \ \"acc_stderr\": 0.04101405519842425,\n \"acc_norm\": 0.5972222222222222,\n\ \ \"acc_norm_stderr\": 0.04101405519842425\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n\ \ \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5317919075144508,\n\ \ \"acc_stderr\": 0.03804749744364764,\n \"acc_norm\": 0.5317919075144508,\n\ \ \"acc_norm_stderr\": 0.03804749744364764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.04440521906179328,\n\ \ \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.04440521906179328\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.425531914893617,\n \"acc_stderr\": 0.03232146916224468,\n\ \ \"acc_norm\": 0.425531914893617,\n \"acc_norm_stderr\": 0.03232146916224468\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.04166567577101579,\n\ \ \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.04166567577101579\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.35185185185185186,\n \"acc_stderr\": 0.02459497512892094,\n \"\ acc_norm\": 0.35185185185185186,\n 
\"acc_norm_stderr\": 0.02459497512892094\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n\ \ \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n\ \ \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.632258064516129,\n \"acc_stderr\": 0.027430866579973467,\n \"\ acc_norm\": 0.632258064516129,\n \"acc_norm_stderr\": 0.027430866579973467\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162933,\n \"\ acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162933\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.62,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\"\ : 0.62,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n\ \ \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7171717171717171,\n \"acc_stderr\": 0.03208779558786752,\n \"\ acc_norm\": 0.7171717171717171,\n \"acc_norm_stderr\": 0.03208779558786752\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8082901554404145,\n \"acc_stderr\": 0.02840895362624527,\n\ \ \"acc_norm\": 0.8082901554404145,\n \"acc_norm_stderr\": 0.02840895362624527\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.49743589743589745,\n \"acc_stderr\": 0.025350672979412195,\n\ \ \"acc_norm\": 0.49743589743589745,\n \"acc_norm_stderr\": 0.025350672979412195\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.31851851851851853,\n \"acc_stderr\": 0.02840653309060847,\n \ \ \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.02840653309060847\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5210084033613446,\n \"acc_stderr\": 0.03244980849990029,\n \ \ \"acc_norm\": 0.5210084033613446,\n \"acc_norm_stderr\": 0.03244980849990029\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"\ acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7431192660550459,\n \"acc_stderr\": 0.018732492928342465,\n \"\ acc_norm\": 0.7431192660550459,\n \"acc_norm_stderr\": 0.018732492928342465\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"\ acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.75,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.75,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.7257383966244726,\n \"acc_stderr\": 0.029041333510598028,\n\ \ \"acc_norm\": 0.7257383966244726,\n \"acc_norm_stderr\": 0.029041333510598028\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6636771300448431,\n\ \ \"acc_stderr\": 0.031708824268455005,\n \"acc_norm\": 0.6636771300448431,\n\ \ \"acc_norm_stderr\": 0.031708824268455005\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6106870229007634,\n \"acc_stderr\": 0.04276486542814591,\n\ \ \"acc_norm\": 0.6106870229007634,\n \"acc_norm_stderr\": 0.04276486542814591\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 
0.7272727272727273,\n \"acc_stderr\": 0.04065578140908705,\n \"\ acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04065578140908705\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6851851851851852,\n\ \ \"acc_stderr\": 0.04489931073591312,\n \"acc_norm\": 0.6851851851851852,\n\ \ \"acc_norm_stderr\": 0.04489931073591312\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6503067484662577,\n \"acc_stderr\": 0.03746668325470021,\n\ \ \"acc_norm\": 0.6503067484662577,\n \"acc_norm_stderr\": 0.03746668325470021\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n\ \ \"acc_stderr\": 0.04157751539865629,\n \"acc_norm\": 0.25892857142857145,\n\ \ \"acc_norm_stderr\": 0.04157751539865629\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n\ \ \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7991452991452992,\n\ \ \"acc_stderr\": 0.02624677294689048,\n \"acc_norm\": 0.7991452991452992,\n\ \ \"acc_norm_stderr\": 0.02624677294689048\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \ \ \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7777777777777778,\n\ \ \"acc_stderr\": 0.01486682166470959,\n \"acc_norm\": 0.7777777777777778,\n\ \ \"acc_norm_stderr\": 0.01486682166470959\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6329479768786127,\n \"acc_stderr\": 0.02595005433765407,\n\ \ \"acc_norm\": 0.6329479768786127,\n \"acc_norm_stderr\": 0.02595005433765407\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4,\n\ \ \"acc_stderr\": 0.01638463841038082,\n \"acc_norm\": 0.4,\n \ \ \"acc_norm_stderr\": 0.01638463841038082\n },\n 
\"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6209150326797386,\n \"acc_stderr\": 0.027780141207023344,\n\ \ \"acc_norm\": 0.6209150326797386,\n \"acc_norm_stderr\": 0.027780141207023344\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n\ \ \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n\ \ \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6358024691358025,\n \"acc_stderr\": 0.026774929899722334,\n\ \ \"acc_norm\": 0.6358024691358025,\n \"acc_norm_stderr\": 0.026774929899722334\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.41134751773049644,\n \"acc_stderr\": 0.02935491115994098,\n \ \ \"acc_norm\": 0.41134751773049644,\n \"acc_norm_stderr\": 0.02935491115994098\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41395045632333766,\n\ \ \"acc_stderr\": 0.012579699631289265,\n \"acc_norm\": 0.41395045632333766,\n\ \ \"acc_norm_stderr\": 0.012579699631289265\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5441176470588235,\n \"acc_stderr\": 0.030254372573976722,\n\ \ \"acc_norm\": 0.5441176470588235,\n \"acc_norm_stderr\": 0.030254372573976722\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5522875816993464,\n \"acc_stderr\": 0.020116925347422425,\n \ \ \"acc_norm\": 0.5522875816993464,\n \"acc_norm_stderr\": 0.020116925347422425\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n\ \ \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n\ \ \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.030932858792789848,\n\ \ \"acc_norm\": 0.6285714285714286,\n \"acc_norm_stderr\": 0.030932858792789848\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7412935323383084,\n\ 
\ \"acc_stderr\": 0.030965903123573037,\n \"acc_norm\": 0.7412935323383084,\n\ \ \"acc_norm_stderr\": 0.030965903123573037\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4457831325301205,\n\ \ \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.4457831325301205,\n\ \ \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n\ \ \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26560587515299877,\n\ \ \"mc1_stderr\": 0.015461027627253597,\n \"mc2\": 0.3918689435636596,\n\ \ \"mc2_stderr\": 0.013891603262173636\n }\n}\n```" repo_url: https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|arc:challenge|25_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hellaswag|10_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T04:13:58.324897.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T04:13:58.324897.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T04:13:58.324897.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T04:13:58.324897.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T04_13_58.324897 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T04:13:58.324897.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T04:13:58.324897.parquet' - config_name: results data_files: - split: 2023_09_02T04_13_58.324897 path: - results_2023-09-02T04:13:58.324897.parquet - split: latest path: - results_2023-09-02T04:13:58.324897.parquet --- # Dataset Card for Evaluation run of CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the 
evaluation run of model [CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj](https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T04:13:58.324897](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-gate_up_down_proj/blob/main/results_2023-09-02T04%3A13%3A58.324897.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5565028631304673, "acc_stderr": 0.03440865789809741, "acc_norm": 0.5605230783595532, "acc_norm_stderr": 0.03438904167387677, "mc1": 0.26560587515299877, "mc1_stderr": 0.015461027627253597, "mc2": 0.3918689435636596, "mc2_stderr": 0.013891603262173636 }, "harness|arc:challenge|25": { "acc": 0.5443686006825939, "acc_stderr": 0.014553749939306866, "acc_norm": 0.5742320819112628, "acc_norm_stderr": 0.014449464278868809 }, "harness|hellaswag|10": { "acc": 0.6169089822744473, "acc_stderr": 0.004851466623601451, "acc_norm": 0.8242381995618403, "acc_norm_stderr": 0.0037983950550215377 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4962962962962963, "acc_stderr": 0.04319223625811331, "acc_norm": 0.4962962962962963, "acc_norm_stderr": 0.04319223625811331 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249034, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5962264150943396, "acc_stderr": 0.03019761160019795, "acc_norm": 0.5962264150943396, "acc_norm_stderr": 0.03019761160019795 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5972222222222222, "acc_stderr": 0.04101405519842425, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.04101405519842425 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 
0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5317919075144508, "acc_stderr": 0.03804749744364764, "acc_norm": 0.5317919075144508, "acc_norm_stderr": 0.03804749744364764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.04440521906179328, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.04440521906179328 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.425531914893617, "acc_stderr": 0.03232146916224468, "acc_norm": 0.425531914893617, "acc_norm_stderr": 0.03232146916224468 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.04166567577101579, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.02459497512892094, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.02459497512892094 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.632258064516129, "acc_stderr": 0.027430866579973467, "acc_norm": 0.632258064516129, "acc_norm_stderr": 0.027430866579973467 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162933, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162933 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.04878317312145632, "acc_norm": 0.62, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7171717171717171, "acc_stderr": 0.03208779558786752, "acc_norm": 0.7171717171717171, "acc_norm_stderr": 0.03208779558786752 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8082901554404145, "acc_stderr": 0.02840895362624527, "acc_norm": 0.8082901554404145, "acc_norm_stderr": 0.02840895362624527 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.49743589743589745, "acc_stderr": 0.025350672979412195, "acc_norm": 0.49743589743589745, "acc_norm_stderr": 0.025350672979412195 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.02840653309060847, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.02840653309060847 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5210084033613446, "acc_stderr": 0.03244980849990029, "acc_norm": 0.5210084033613446, "acc_norm_stderr": 0.03244980849990029 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7431192660550459, "acc_stderr": 0.018732492928342465, "acc_norm": 0.7431192660550459, "acc_norm_stderr": 0.018732492928342465 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 
0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.75, "acc_stderr": 0.03039153369274154, "acc_norm": 0.75, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7257383966244726, "acc_stderr": 0.029041333510598028, "acc_norm": 0.7257383966244726, "acc_norm_stderr": 0.029041333510598028 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6636771300448431, "acc_stderr": 0.031708824268455005, "acc_norm": 0.6636771300448431, "acc_norm_stderr": 0.031708824268455005 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6106870229007634, "acc_stderr": 0.04276486542814591, "acc_norm": 0.6106870229007634, "acc_norm_stderr": 0.04276486542814591 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04065578140908705, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04065578140908705 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6851851851851852, "acc_stderr": 0.04489931073591312, "acc_norm": 0.6851851851851852, "acc_norm_stderr": 0.04489931073591312 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6503067484662577, "acc_stderr": 0.03746668325470021, "acc_norm": 0.6503067484662577, "acc_norm_stderr": 0.03746668325470021 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.04157751539865629, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.04157751539865629 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7991452991452992, "acc_stderr": 0.02624677294689048, "acc_norm": 0.7991452991452992, "acc_norm_stderr": 0.02624677294689048 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 
0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7777777777777778, "acc_stderr": 0.01486682166470959, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.01486682166470959 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6329479768786127, "acc_stderr": 0.02595005433765407, "acc_norm": 0.6329479768786127, "acc_norm_stderr": 0.02595005433765407 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4, "acc_stderr": 0.01638463841038082, "acc_norm": 0.4, "acc_norm_stderr": 0.01638463841038082 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6209150326797386, "acc_stderr": 0.027780141207023344, "acc_norm": 0.6209150326797386, "acc_norm_stderr": 0.027780141207023344 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971646, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6358024691358025, "acc_stderr": 0.026774929899722334, "acc_norm": 0.6358024691358025, "acc_norm_stderr": 0.026774929899722334 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41134751773049644, "acc_stderr": 0.02935491115994098, "acc_norm": 0.41134751773049644, "acc_norm_stderr": 0.02935491115994098 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41395045632333766, "acc_stderr": 0.012579699631289265, "acc_norm": 0.41395045632333766, "acc_norm_stderr": 0.012579699631289265 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5441176470588235, "acc_stderr": 0.030254372573976722, "acc_norm": 0.5441176470588235, "acc_norm_stderr": 0.030254372573976722 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5522875816993464, "acc_stderr": 0.020116925347422425, "acc_norm": 0.5522875816993464, "acc_norm_stderr": 0.020116925347422425 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, 
"acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.030932858792789848, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.030932858792789848 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7412935323383084, "acc_stderr": 0.030965903123573037, "acc_norm": 0.7412935323383084, "acc_norm_stderr": 0.030965903123573037 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.4457831325301205, "acc_stderr": 0.03869543323472101, "acc_norm": 0.4457831325301205, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.26560587515299877, "mc1_stderr": 0.015461027627253597, "mc2": 0.3918689435636596, "mc2_stderr": 0.013891603262173636 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
MouseTrap/maow_maow_dataset
2023-09-02T04:48:35.000Z
[ "region:us" ]
MouseTrap
null
null
null
0
0
Entry not found
ThursdayU/pomoseg
2023-09-02T05:05:00.000Z
[ "region:us" ]
ThursdayU
null
null
null
0
0
Entry not found
cinnachromaformula/cinnachroma-blood-sugar-support-formula
2023-09-02T05:16:31.000Z
[ "region:us" ]
cinnachromaformula
null
null
null
0
0
<h2><a href="https://www.globalfitnessmart.com/get-cinnachro"><span style="background-color: #ffcc00; color: blue;"><strong>{CinnaChroma- Official Website -- Order Now}</strong></span></a></h2> <h2><strong>➡️<span style="color: red;">● For Order Official Website - <a style="color: red;" href="https://www.globalfitnessmart.com/get-cinnachro">https://www.globalfitnessmart.com/get-cinnachro</a></span></strong><br /><strong>➡️<span style="color: #33cccc;">● Item Name: &mdash; <a style="color: #33cccc;" href="https://www.globalfitnessmart.com/get-cinnachro">CinnaChroma</a></span></strong><br /><strong>➡️<span style="color: #99cc00;">● Ingredients: &mdash; All Natural</span></strong><br /><strong>➡️<span style="color: #ffcc00;">● Incidental Effects: &mdash; NA</span></strong><br /><strong>➡️<span style="color: purple;">● Accessibility: &mdash; <a style="color: purple;" href="https://www.globalfitnessmart.com/get-cinnachro">Online</a></span></strong></h2> <h2><span style="background-color: #ffcc00; color: #0000ff;"><a style="background-color: #ffcc00; color: #0000ff;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>✅HUGE DISCOUNT ! HURRY UP! ORDER NOW!✅</strong></a></span><br /><span style="background-color: #ffcc00; color: #0000ff;"><a style="background-color: #ffcc00; color: #0000ff;" href="https://www.globalfitnessmart.com/get-cinnachro">✅HUGE DISCOUNT ! HURRY UP! ORDER NOW!✅</a></span><br /><span style="background-color: #ffcc00; color: #0000ff;"><a style="background-color: #ffcc00; color: #0000ff;" href="https://www.globalfitnessmart.com/get-cinnachro">✅HUGE DISCOUNT ! HURRY UP! ORDER NOW!✅</a></span></h2> <h2><strong>CinnaChroma Blood Sugar Support Formula Review</strong></h2> <p>CinnaChroma Blood Sugar Support Formula is a dietary supplement that provides the body with the nutrients it requires for glycemic control.An additional 84 million individuals have prediabetes, which, if unchecked, can progress to full-blown diabetes within five years. 
Around 114 million people in the United States have Type 2 diabetes or are at risk of acquiring it. And as you get older, your risk increases. The bulk of Type 2 Diabetes cases occur in middle-aged people, particularly those who have a sedentary lifestyle or are overweight.</p> <h2 style="text-align: center;"><span style="background-color: #ffcc00; color: #339966;"><a style="background-color: #ffcc00; color: #339966;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>SPECIAL PROMO[Limited Discount]: "<span style="color: #0000ff;">CinnaChroma</span> USA"Official Website!</strong></a></span></h2> <h2><strong>What is CinnaChroma Blood Sugar Support Formula?</strong></h2> <p>CinnaChroma Blood Sugar Support Formula is a dietary supplement that provides the body with the nutrients it requires for glycemic control. The manufacturer claims that this supplement combines the clinically proven blood sugar lowering properties of cinnamon and chromium in a single formulation.</p> <p>With the robust composition of a cinnamon-based sugar blocker, you have one of nature's metabolic powerhouses in your hands. It contains this traditional spice as well as five other nutrients that have been shown by modern medicine to aid in carb digestion and absorption.</p> <p>If used as directed, it may allow you to consume all of those "forbidden" foods without worry of acquiring diabetes. The unique mix of certain ingredients can enable proper sugar profile control in addition to assisting in the digestion of carbohydrates, sweets, and junk meals.</p> <p>Furthermore, it exclusively includes substances obtained from natural sources, which might help consumers without causing negative side effects. It is manufactured in the United States of America in an FDA-regulated facility using only high-quality ingredients. There are no fillers or byproducts in this supplement. Does CinnaChroma Blood Sugar Support Formula Really Manage Blood Sugar? 
Read Real Customer Reviews Here</p> <h2><strong>How does it work?</strong></h2> <p>High blood sugar levels should not be ignored. Instead, treatment should begin immediately as it can cause a lot of other health issues. If left addressed, this ailment might progress to more significant health problems. CinnaChroma Blood Sugar Support Formula is a wonderful option for preventing increasing blood sugar levels since it effectively maintains and controls them.</p> <p>The dietary supplement helps the body maintain balanced glucose levels in the system. This keeps people from developing health problems like obesity and type 2 diabetes. CinnaChroma Blood Sugar Support Formula is a nutritional blend that may boost metabolic rate, general health, and well-being. It combines six important components that work together to address the underlying cause of diabetes.</p> <p>According to the dietary supplement's official website, the innovative formula utilized is already potent enough that consumers do not need to follow tight diets or modify their lives. CinnaChroma Blood Sugar Support Formula also helps people lose weight in a healthy and natural way. It makes it simple to get rid of stubborn fat in the abdomen, thighs, arms, and other troublesome places.</p> <h2 style="text-align: center;"><span style="background-color: #ffcc00; color: #339966;"><a style="background-color: #ffcc00; color: #339966;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>SPECIAL PROMO: Get <span style="color: #993366;">CinnaChroma</span> at the Lowest Discounted Price Online</strong></a></span></h2> <h2><strong>How Cinnamon Works for Blood Sugar</strong></h2> <p>The supplement uses a specific type of cinnamon, known as "false cinnamon" or Chinese cassia, which has been linked to blood sugar benefits in multiple studies. 
CinnaChroma Blood Sugar Support Formula contains a 10:1 dose of cinnamon extract, which is equivalent to 10 servings of other cinnamon products.</p> <p>Cinnamon is a powerful ingredient that supports healthy blood sugar. According to studies cited by Barton Nutrition on the official CinnaChroma Blood Sugar Support Formula sales page, the cinnamon in the formula can lead to benefits like a decrease in blood sugar levels. In one study, 18 people with type 2 diabetes took cinnamon over 12 weeks, and all of the subjects in the cinnamon group had a statistically significant decrease in their blood sugar levels. Researchers in that study found that cinnamon led to a similar decrease in blood sugar as conventional drugs, but without the harmful side effects of traditional diabetes medication.</p> <p>One of the ingredients in CinnaChroma Blood Sugar Support Formula is chromium picolinate, which can curb appetite and reduce binge eating. This is particularly helpful for people with type 2 diabetes who may struggle with weight management. By controlling appetite and reducing binge eating, chromium picolinate can help people maintain a healthy weight and manage their blood sugar levels.</p> <h2><strong>Ingredients CinnaChroma Blood Sugar Support Formula<br /></strong></h2> <p>CinnaChroma Blood Sugar Support Formula is formulated with natural vitamins and minerals that complement any lifestyle and wellness regimen. CinnaChroma Blood Sugar Support Formula was created in collaboration with Barton Nutrition's medical and nutrition advisor, Dr. Scott Saunders.</p> <p>As stated previously, this product mixes cinnamon bark extract with additional health-promoting components to manage glucose levels. Here are a handful of CinnaChroma Blood Sugar Support Formula's health-promoting ingredients and their respective benefits:</p> <p><strong>#Cinnamon Bark</strong></p> <p>Cinnamon Bark is effective at reducing diabetes and cardiovascular disease risk factors. 
A 2003 clinical trial published in the journal Diabetes Care indicated that cassia cinnamon reduces blood glucose and cholesterol levels in type 2 diabetics. It is beneficial to your health when consumed in moderate doses.</p> <p><strong>#Chromium</strong></p> <p>Chromium supplements may assist diabetics in lowering their blood sugar levels. Chromium picolinate is the most effective supplement form of chromium. There is evidence that chromium can reduce glucose levels and enhance insulin sensitivity, however not all studies have demonstrated a benefit.</p> <p><strong>#Vanadium</strong></p> <p>Vanadium may aid diabetic neuropathy and alleviate pain caused by free radical damage. Multiple animal research and a few modest human trials suggest that vanadium may reduce blood sugar levels and increase insulin sensitivity in type 2 diabetics. In one research of individuals with type 2 diabetes, vanadium reduced total and LDL cholesterol levels.</p> <p><strong>#Selenium</strong></p> <p>Selenium is an indispensable trace element involved in the intricate mechanism of protection against oxidative stress. Selenium's antioxidant qualities may thereby inhibit the development of diabetes. Evidence suggests that appropriate selenium concentrations are essential for insulin secretion.</p> <p><strong>#Vitamin-K2</strong></p> <p>Vitamin K is clinically proven for its role in blood clotting. In addition, a number of human investigations have demonstrated that supplementation of vitamin-K2 improves insulin sensitivity. 
Vitamin K2 supplementation has been demonstrated to lessen the chance of developing diabetes.</p> <h2 style="text-align: center;"><span style="background-color: #ffcc00; color: #339966;"><a style="background-color: #ffcc00; color: #339966;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>(EXCLUSIVE OFFER)Click Here : "<span style="color: #ff6600;">CinnaChroma</span> USA"Official Website!</strong></a></span></h2> <h2><strong>Benefits CinnaChroma Blood Sugar Support Formula<br /></strong></h2> <p>CinnaChroma Blood Sugar Support Formula is a dietary supplement that may provide various health benefits, especially for individuals who struggle with high blood sugar levels and obesity. Its key ingredients work synergistically to maintain balanced glucose levels in the body and support healthy weight loss. Here are some of the benefits of using CinnaChroma Blood Sugar Support Formula:</p> <p><strong>#Maintains Healthy Blood Sugar Levels</strong></p> <p>One of the primary benefits of CinnaChroma Blood Sugar Support Formula is that it may help individuals maintain healthy blood sugar levels. Its key ingredient, cinnamon, has been shown to improve insulin sensitivity and reduce fasting blood glucose levels.</p> <p>Additionally, the other components in CinnaChroma Blood Sugar Support Formula, including chromium, biotin, magnesium, zinc, and vanadium, have also been shown to regulate blood sugar levels and improve glucose metabolism. By taking CinnaChroma Blood Sugar Support Formula regularly, individuals may be able to reduce their risk of developing diabetes and other health problems associated with high blood sugar levels.</p> <p><strong>#Promotes Healthy Weight Loss</strong></p> <p>CinnaChroma Blood Sugar Support Formula may also help individuals lose weight in a healthy and natural way. Its ingredients may help boost metabolism and promote fat burning, making it easier to shed stubborn fat in the abdomen, thighs, arms, and other troublesome areas. 
However, it is important to note that CinnaChroma Blood Sugar Support Formula is not a substitute for a healthy diet and regular exercise. It should be used as a supplement to a healthy lifestyle to support healthy weight loss.</p> <p><strong>#Enhances Insulin Sensitivity</strong></p> <p>Insulin sensitivity refers to how well the body's cells respond to insulin. High insulin sensitivity is beneficial as it helps the body use insulin effectively to maintain healthy blood sugar levels. CinnaChroma Blood Sugar Support Formula contains several ingredients that have been shown to enhance insulin sensitivity, including cinnamon and chromium. By improving insulin sensitivity, CinnaChroma Blood Sugar Support Formula may help individuals reduce their risk of developing insulin resistance and type 2 diabetes.</p> <p><strong>#Supports Cardiovascular Health</strong></p> <p>High blood sugar levels and obesity can increase the risk of developing cardiovascular disease. CinnaChroma Blood Sugar Support Formula may help support cardiovascular health by maintaining healthy blood sugar levels and promoting healthy weight loss.</p> <p>Its ingredients have also been shown to have antioxidant and anti-inflammatory properties, which may help reduce inflammation and oxidative stress in the body, two factors that contribute to cardiovascular disease.</p> <p><strong>#Improves Digestive Health</strong></p> <p>CinnaChroma Blood Sugar Support Formula may also improve digestive health by promoting healthy gut bacteria. Cinnamon, one of the key ingredients in CinnaChroma Blood Sugar Support Formula, has been shown to have antimicrobial properties that may help reduce harmful bacteria in the gut. 
Additionally, the other components in CinnaChroma Blood Sugar Support Formula, including magnesium and zinc, have also been shown to promote healthy digestion.</p> <p><strong>#Boosts Energy Levels</strong></p> <p>CinnaChroma Blood Sugar Support Formula's ingredients may help boost energy levels by supporting healthy metabolism. Cinnamon, for example, has been shown to enhance metabolic rate, which may lead to increased energy levels. Additionally, the other components in CinnaChroma Blood Sugar Support Formula, including biotin and magnesium, have also been shown to support healthy energy levels.</p> <p><strong>#Supports Bone Health</strong></p> <p>CinnaChroma Blood Sugar Support Formula may also support bone health by promoting healthy calcium absorption. Magnesium, one of the key ingredients in CinnaChroma Blood Sugar Support Formula, has been shown to help the body absorb calcium, which is essential for healthy bones. Additionally, the other components in CinnaChroma Blood Sugar Support Formula, including zinc and vanadium, have also been shown to support healthy bone density.</p> <p><strong>#May Improve Brain Function</strong></p> <p>CinnaChroma Blood Sugar Support Formula's ingredients may also have benefits for brain function. Cinnamon, for example, has been shown to improve cognitive function and memory. 
Additionally, the other components in CinnaChroma Blood Sugar Support Formula, including magnesium and zinc, have also been shown to support healthy brain function.</p> <h2 style="text-align: center;"><span style="background-color: #ffcc00; color: #339966;"><a style="background-color: #ffcc00; color: #339966;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>SPECIAL PROMO: Get <span style="color: #993366;">CinnaChroma</span> at the Lowest Discounted Price Online</strong></a></span></h2> <h2><strong>Scientific Evidence for CinnaChroma Blood Sugar Support Formula</strong></h2> <p>CinnaChroma Blood Sugar Support Formula contains a variety of ingredients, many of which have scientific evidence supporting their potential positive impact on blood sugar levels. According to reviews on the official CinnaChroma Blood Sugar Support Formula website, many users claim to have experienced significant drops in blood sugar and A1C scores after taking the supplement.</p> <p>However, it's important to note that cinnamon isn't a guaranteed solution for lowering blood sugar or fixing A1C scores. In a 2013 study, researchers tested the effects of 1g of cinnamon on a group of people with type 2 diabetes over a period of 60 days. The study found no significant difference in blood glucose levels between the group taking cinnamon and the placebo group.</p> <p>It's important to note that while CinnaChroma Blood Sugar Support Formula may have potential benefits for blood sugar, it's not a substitute for medical treatment for diabetes or other health conditions. Before taking any dietary supplement, it's always a good idea to consult with a healthcare provider to ensure it's safe and appropriate for your individual needs. <br />Guidelines for Consumers</p> <h2><strong>Where to Buy CinnaChroma Blood Sugar Support Formula?</strong></h2> <p>CinnaChroma Blood Sugar Support Formula is priced as low as $49 per bottle, depending on the number of bottles you order. 
The ordinary retail price is $67. All purchases are backed by a 365-day moneyback guarantee, and you can pay online using any major credit card.</p> <p>Here&rsquo;s how pricing breaks down when ordered through the official Barton Nutrition website today:</p> <ul> <li><strong>1 Bottle: $67 + Shipping</strong></li> <li><strong>3 Bottles: $177 + Free US Shipping</strong></li> <li><strong>6 Bottles: $294 + Free US Shipping</strong></li> </ul> <p>Each bottle contains 30 capsules, or a 30 day supply of CinnaChroma Blood Sugar Support Formula. You take one capsule daily to support blood sugar.</p> <h2><strong>Final Verdict CinnaChroma Blood Sugar Support Formula<br /></strong></h2> <p>In conclusion, CinnaChroma Blood Sugar Support Formula is a dietary supplement that claims to help people manage their blood sugar levels and improve symptoms of diabetes. The supplement contains a range of active ingredients that have been shown to support healthy blood sugar levels and prevent complications associated with diabetes.</p> <p>The largest and most important ingredient in CinnaChroma Blood Sugar Support Formula is cinnamon, which has been used for centuries to treat a range of health issues, including diabetes. Research has shown that cinnamon can reduce fasting blood glucose levels, triglycerides, LDL cholesterol, and total cholesterol. However, it's important to note that cinnamon is not a cure-all for diabetes, and its effects may vary depending on the individual.</p> <p>While the scientific evidence supporting the use of CinnaChroma Blood Sugar Support Formula is promising, more research is needed to determine its long-term effectiveness and safety. 
Some people may also experience side effects or allergic reactions to some of the ingredients in the supplement, so it's important to read the label carefully and follow the recommended dosage.</p> <p>Overall, CinnaChroma Blood Sugar Support Formula is a promising supplement for people looking to manage their blood sugar levels and improve symptoms of diabetes. However, it's important to approach any supplement with caution and to seek the advice of a healthcare provider before starting a new regimen.</p> <h2 style="text-align: center;"><span style="background-color: #ffcc00; color: #339966;"><a style="background-color: #ffcc00; color: #339966;" href="https://www.globalfitnessmart.com/get-cinnachro"><strong>Read This: "More Information From Knowledgeable Expertise of <span style="color: #0000ff;">CinnaChroma</span>"</strong></a></span></h2> <h2><span style="color: #339966;"># READ MORE</span></h2> <p><span style="background-color: #ffcc00; color: #339966;"><strong><a href="https://cinnachroma-official.clubeo.com/calendar/2023/09/02/cinnachroma-blood-sugar-support-formula-official-website-united-states">https://cinnachroma-official.clubeo.com/calendar/2023/09/02/cinnachroma-blood-sugar-support-formula-official-website-united-states</a></strong></span></p> <p><span style="background-color: #ffcc00; color: #339966;"><strong><a href="https://cinnachroma-official.clubeo.com/page/cinnachroma-reviews-is-legit-updated-2023-report.html">https://cinnachroma-official.clubeo.com/page/cinnachroma-reviews-is-legit-updated-2023-report.html</a></strong></span></p> <p><span style="background-color: #ffcc00; color: #339966;"><strong><a href="https://cinnachroma-official.clubeo.com/page/cinnachroma-blood-sugar-support-formula-reviews-viral-scam-or-legit-is-it-work-or-not.html">https://cinnachroma-official.clubeo.com/page/cinnachroma-blood-sugar-support-formula-reviews-viral-scam-or-legit-is-it-work-or-not.html</a></strong></span></p> <p><span style="background-color: #ffcc00; color: 
#339966;"><strong><a href="https://cinnachroma-official.clubeo.com/calendar/2023/09/02/cinnachroma-blood-sugar-support-formula-official-website-united-states">https://cinnachroma-official.clubeo.com/calendar/2023/09/02/cinnachroma-blood-sugar-support-formula-official-website-united-states</a></strong></span></p> <p><span style="background-color: #ffcc00; color: #339966;"><strong><a href="https://huggingface.co/cinnachromaformula/cinnachroma/blob/main/README.md">https://huggingface.co/cinnachromaformula/cinnachroma/blob/main/README.md</a></strong></span></p> <p>&nbsp;</p>
open-llm-leaderboard/details_yeontaek__llama-2-70B-ensemble-v4
2023-09-02T05:23:56.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of yeontaek/llama-2-70B-ensemble-v4 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [yeontaek/llama-2-70B-ensemble-v4](https://huggingface.co/yeontaek/llama-2-70B-ensemble-v4)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_yeontaek__llama-2-70B-ensemble-v4\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T05:22:36.145219](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-70B-ensemble-v4/blob/main/results_2023-09-02T05%3A22%3A36.145219.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6965991479686341,\n\ \ \"acc_stderr\": 0.03128858284011723,\n \"acc_norm\": 0.7002900066329676,\n\ \ \"acc_norm_stderr\": 0.03126026366396146,\n \"mc1\": 0.4418604651162791,\n\ \ \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.6260206771095533,\n\ \ \"mc2_stderr\": 0.014926739687315194\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6808873720136519,\n \"acc_stderr\": 0.013621696119173302,\n\ \ \"acc_norm\": 0.7090443686006825,\n \"acc_norm_stderr\": 0.01327307786590759\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6838279227245568,\n\ \ \"acc_stderr\": 0.0046403067196280675,\n \"acc_norm\": 0.8734315873332006,\n\ \ \"acc_norm_stderr\": 0.00331809357970292\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n\ \ \"acc_stderr\": 0.042446332383532286,\n \"acc_norm\": 0.5925925925925926,\n\ \ \"acc_norm_stderr\": 0.042446332383532286\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.7763157894736842,\n \"acc_stderr\": 0.033911609343436025,\n\ \ \"acc_norm\": 0.7763157894736842,\n \"acc_norm_stderr\": 0.033911609343436025\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.69,\n\ \ \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.7433962264150943,\n \"acc_stderr\": 0.026880647889051982,\n\ \ \"acc_norm\": 0.7433962264150943,\n \"acc_norm_stderr\": 0.026880647889051982\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.8541666666666666,\n\ \ \"acc_stderr\": 0.02951424596429177,\n \"acc_norm\": 0.8541666666666666,\n\ \ \"acc_norm_stderr\": 0.02951424596429177\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \ \ \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.54,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n\ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \ \ \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n\ \ \"acc_stderr\": 0.0356760379963917,\n \"acc_norm\": 0.6763005780346821,\n\ \ \"acc_norm_stderr\": 0.0356760379963917\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201943,\n\ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201943\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n\ \ \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.6680851063829787,\n \"acc_stderr\": 0.03078373675774565,\n\ \ \"acc_norm\": 0.6680851063829787,\n \"acc_norm_stderr\": 0.03078373675774565\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\ \ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\ \ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n\ \ \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.4708994708994709,\n \"acc_stderr\": 0.025707658614154954,\n \"\ acc_norm\": 0.4708994708994709,\n 
\"acc_norm_stderr\": 0.025707658614154954\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.5238095238095238,\n\ \ \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.5238095238095238,\n\ \ \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \ \ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8225806451612904,\n\ \ \"acc_stderr\": 0.021732540689329276,\n \"acc_norm\": 0.8225806451612904,\n\ \ \"acc_norm_stderr\": 0.021732540689329276\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n\ \ \"acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932262,\n \"acc_norm\"\ : 0.78,\n \"acc_norm_stderr\": 0.04163331998932262\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.8606060606060606,\n \"acc_stderr\": 0.027045948825865376,\n\ \ \"acc_norm\": 0.8606060606060606,\n \"acc_norm_stderr\": 0.027045948825865376\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8737373737373737,\n \"acc_stderr\": 0.023664359402880236,\n \"\ acc_norm\": 0.8737373737373737,\n \"acc_norm_stderr\": 0.023664359402880236\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.9481865284974094,\n \"acc_stderr\": 0.01599622932024412,\n\ \ \"acc_norm\": 0.9481865284974094,\n \"acc_norm_stderr\": 0.01599622932024412\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.7205128205128205,\n \"acc_stderr\": 0.022752388839776826,\n\ \ \"acc_norm\": 0.7205128205128205,\n \"acc_norm_stderr\": 0.022752388839776826\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3296296296296296,\n \"acc_stderr\": 0.02866120111652459,\n \ \ \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.02866120111652459\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.02626502460827588,\n \ \ \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.02626502460827588\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.46357615894039733,\n \"acc_stderr\": 0.04071636065944215,\n \"\ acc_norm\": 0.46357615894039733,\n \"acc_norm_stderr\": 0.04071636065944215\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8880733944954129,\n \"acc_stderr\": 0.013517352714958792,\n \"\ acc_norm\": 0.8880733944954129,\n \"acc_norm_stderr\": 0.013517352714958792\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.6064814814814815,\n \"acc_stderr\": 0.03331747876370312,\n \"\ acc_norm\": 0.6064814814814815,\n \"acc_norm_stderr\": 0.03331747876370312\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8970588235294118,\n \"acc_stderr\": 0.02132833757080437,\n \"\ acc_norm\": 0.8970588235294118,\n \"acc_norm_stderr\": 0.02132833757080437\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.890295358649789,\n \"acc_stderr\": 0.020343400734868834,\n \ \ \"acc_norm\": 0.890295358649789,\n \"acc_norm_stderr\": 0.020343400734868834\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.8161434977578476,\n\ \ \"acc_stderr\": 0.025998379092356513,\n \"acc_norm\": 0.8161434977578476,\n\ \ \"acc_norm_stderr\": 0.025998379092356513\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.8320610687022901,\n \"acc_stderr\": 0.032785485373431386,\n\ \ \"acc_norm\": 0.8320610687022901,\n \"acc_norm_stderr\": 0.032785485373431386\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.859504132231405,\n \"acc_stderr\": 0.031722334260021585,\n \"\ acc_norm\": 0.859504132231405,\n \"acc_norm_stderr\": 0.031722334260021585\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n\ \ \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n\ \ \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.8159509202453987,\n \"acc_stderr\": 0.030446777687971726,\n\ \ \"acc_norm\": 0.8159509202453987,\n \"acc_norm_stderr\": 0.030446777687971726\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5892857142857143,\n\ \ \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.5892857142857143,\n\ \ \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n\ \ \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8931623931623932,\n\ \ \"acc_stderr\": 0.02023714900899093,\n \"acc_norm\": 0.8931623931623932,\n\ \ \"acc_norm_stderr\": 0.02023714900899093\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8659003831417624,\n\ \ \"acc_stderr\": 0.012185528166499978,\n \"acc_norm\": 0.8659003831417624,\n\ \ \"acc_norm_stderr\": 0.012185528166499978\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.6502793296089385,\n\ \ \"acc_stderr\": 0.015949308790233645,\n \"acc_norm\": 
0.6502793296089385,\n\ \ \"acc_norm_stderr\": 0.015949308790233645\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.025058503316958154,\n\ \ \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.025058503316958154\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7556270096463023,\n\ \ \"acc_stderr\": 0.024406162094668893,\n \"acc_norm\": 0.7556270096463023,\n\ \ \"acc_norm_stderr\": 0.024406162094668893\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.021185893615225163,\n\ \ \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.021185893615225163\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5673758865248227,\n \"acc_stderr\": 0.029555454236778845,\n \ \ \"acc_norm\": 0.5673758865248227,\n \"acc_norm_stderr\": 0.029555454236778845\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5919165580182529,\n\ \ \"acc_stderr\": 0.012552598958563671,\n \"acc_norm\": 0.5919165580182529,\n\ \ \"acc_norm_stderr\": 0.012552598958563671\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.7095588235294118,\n \"acc_stderr\": 0.02757646862274054,\n\ \ \"acc_norm\": 0.7095588235294118,\n \"acc_norm_stderr\": 0.02757646862274054\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.7516339869281046,\n \"acc_stderr\": 0.017479487001364764,\n \ \ \"acc_norm\": 0.7516339869281046,\n \"acc_norm_stderr\": 0.017479487001364764\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n\ \ \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n\ \ \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7795918367346939,\n \"acc_stderr\": 0.02653704531214529,\n\ \ \"acc_norm\": 0.7795918367346939,\n \"acc_norm_stderr\": 0.02653704531214529\n\ \ },\n 
\"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n\ \ \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n\ \ \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \ \ \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n\ \ \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n\ \ \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.027097290118070813,\n\ \ \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.027097290118070813\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4418604651162791,\n\ \ \"mc1_stderr\": 0.017384767478986218,\n \"mc2\": 0.6260206771095533,\n\ \ \"mc2_stderr\": 0.014926739687315194\n }\n}\n```" repo_url: https://huggingface.co/yeontaek/llama-2-70B-ensemble-v4 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|arc:challenge|25_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hellaswag|10_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T05:22:36.145219.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T05:22:36.145219.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T05:22:36.145219.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T05:22:36.145219.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T05_22_36.145219 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T05:22:36.145219.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T05:22:36.145219.parquet' - config_name: results data_files: - split: 2023_09_02T05_22_36.145219 path: - results_2023-09-02T05:22:36.145219.parquet - split: latest path: - results_2023-09-02T05:22:36.145219.parquet --- # Dataset Card for Evaluation run of yeontaek/llama-2-70B-ensemble-v4 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/yeontaek/llama-2-70B-ensemble-v4 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model 
[yeontaek/llama-2-70B-ensemble-v4](https://huggingface.co/yeontaek/llama-2-70B-ensemble-v4)
on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 61 configurations, each one corresponding to one of the evaluated
tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each
configuration, the split being named using the timestamp of the run. The "train" split is always
pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used
to compute and display the aggregated metrics on the [Open LLM
Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_yeontaek__llama-2-70B-ensemble-v4",
	"harness_truthfulqa_mc_0",
	split="train")
```

## Latest results

These are the [latest results from run 2023-09-02T05:22:36.145219](https://huggingface.co/datasets/open-llm-leaderboard/details_yeontaek__llama-2-70B-ensemble-v4/blob/main/results_2023-09-02T05%3A22%3A36.145219.json)(note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks.
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6965991479686341, "acc_stderr": 0.03128858284011723, "acc_norm": 0.7002900066329676, "acc_norm_stderr": 0.03126026366396146, "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.6260206771095533, "mc2_stderr": 0.014926739687315194 }, "harness|arc:challenge|25": { "acc": 0.6808873720136519, "acc_stderr": 0.013621696119173302, "acc_norm": 0.7090443686006825, "acc_norm_stderr": 0.01327307786590759 }, "harness|hellaswag|10": { "acc": 0.6838279227245568, "acc_stderr": 0.0046403067196280675, "acc_norm": 0.8734315873332006, "acc_norm_stderr": 0.00331809357970292 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.042446332383532286, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.042446332383532286 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7763157894736842, "acc_stderr": 0.033911609343436025, "acc_norm": 0.7763157894736842, "acc_norm_stderr": 0.033911609343436025 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7433962264150943, "acc_stderr": 0.026880647889051982, "acc_norm": 0.7433962264150943, "acc_norm_stderr": 0.026880647889051982 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.8541666666666666, "acc_stderr": 0.02951424596429177, "acc_norm": 0.8541666666666666, "acc_norm_stderr": 0.02951424596429177 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 
0.05009082659620332 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.0356760379963917, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.0356760379963917 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6680851063829787, "acc_stderr": 0.03078373675774565, "acc_norm": 0.6680851063829787, "acc_norm_stderr": 0.03078373675774565 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4708994708994709, "acc_stderr": 0.025707658614154954, "acc_norm": 0.4708994708994709, "acc_norm_stderr": 0.025707658614154954 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.5238095238095238, "acc_stderr": 0.04467062628403273, "acc_norm": 0.5238095238095238, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8225806451612904, "acc_stderr": 0.021732540689329276, "acc_norm": 0.8225806451612904, "acc_norm_stderr": 0.021732540689329276 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5369458128078818, "acc_stderr": 0.035083705204426656, "acc_norm": 0.5369458128078818, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.78, "acc_stderr": 0.04163331998932262, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932262 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8606060606060606, "acc_stderr": 0.027045948825865376, "acc_norm": 0.8606060606060606, "acc_norm_stderr": 0.027045948825865376 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8737373737373737, "acc_stderr": 0.023664359402880236, "acc_norm": 0.8737373737373737, "acc_norm_stderr": 0.023664359402880236 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9481865284974094, "acc_stderr": 0.01599622932024412, "acc_norm": 0.9481865284974094, "acc_norm_stderr": 0.01599622932024412 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.7205128205128205, "acc_stderr": 0.022752388839776826, "acc_norm": 0.7205128205128205, "acc_norm_stderr": 0.022752388839776826 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.02866120111652459, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.02866120111652459 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7941176470588235, "acc_stderr": 0.02626502460827588, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.02626502460827588 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.46357615894039733, "acc_stderr": 0.04071636065944215, "acc_norm": 0.46357615894039733, "acc_norm_stderr": 0.04071636065944215 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8880733944954129, "acc_stderr": 0.013517352714958792, "acc_norm": 0.8880733944954129, "acc_norm_stderr": 0.013517352714958792 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.6064814814814815, "acc_stderr": 
0.03331747876370312, "acc_norm": 0.6064814814814815, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8970588235294118, "acc_stderr": 0.02132833757080437, "acc_norm": 0.8970588235294118, "acc_norm_stderr": 0.02132833757080437 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.890295358649789, "acc_stderr": 0.020343400734868834, "acc_norm": 0.890295358649789, "acc_norm_stderr": 0.020343400734868834 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.8161434977578476, "acc_stderr": 0.025998379092356513, "acc_norm": 0.8161434977578476, "acc_norm_stderr": 0.025998379092356513 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8320610687022901, "acc_stderr": 0.032785485373431386, "acc_norm": 0.8320610687022901, "acc_norm_stderr": 0.032785485373431386 }, "harness|hendrycksTest-international_law|5": { "acc": 0.859504132231405, "acc_stderr": 0.031722334260021585, "acc_norm": 0.859504132231405, "acc_norm_stderr": 0.031722334260021585 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.8159509202453987, "acc_stderr": 0.030446777687971726, "acc_norm": 0.8159509202453987, "acc_norm_stderr": 0.030446777687971726 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5892857142857143, "acc_stderr": 0.04669510663875191, "acc_norm": 0.5892857142857143, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8931623931623932, "acc_stderr": 0.02023714900899093, "acc_norm": 0.8931623931623932, "acc_norm_stderr": 0.02023714900899093 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 
0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8659003831417624, "acc_stderr": 0.012185528166499978, "acc_norm": 0.8659003831417624, "acc_norm_stderr": 0.012185528166499978 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7514450867052023, "acc_stderr": 0.023267528432100174, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.6502793296089385, "acc_stderr": 0.015949308790233645, "acc_norm": 0.6502793296089385, "acc_norm_stderr": 0.015949308790233645 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958154, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958154 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7556270096463023, "acc_stderr": 0.024406162094668893, "acc_norm": 0.7556270096463023, "acc_norm_stderr": 0.024406162094668893 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.8240740740740741, "acc_stderr": 0.021185893615225163, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.021185893615225163 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5673758865248227, "acc_stderr": 0.029555454236778845, "acc_norm": 0.5673758865248227, "acc_norm_stderr": 0.029555454236778845 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5919165580182529, "acc_stderr": 0.012552598958563671, "acc_norm": 0.5919165580182529, "acc_norm_stderr": 0.012552598958563671 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7095588235294118, "acc_stderr": 0.02757646862274054, "acc_norm": 0.7095588235294118, "acc_norm_stderr": 0.02757646862274054 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7516339869281046, "acc_stderr": 0.017479487001364764, "acc_norm": 0.7516339869281046, "acc_norm_stderr": 0.017479487001364764 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, 
"acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7795918367346939, "acc_stderr": 0.02653704531214529, "acc_norm": 0.7795918367346939, "acc_norm_stderr": 0.02653704531214529 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.027097290118070813, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.027097290118070813 }, "harness|truthfulqa:mc|0": { "mc1": 0.4418604651162791, "mc1_stderr": 0.017384767478986218, "mc2": 0.6260206771095533, "mc2_stderr": 0.014926739687315194 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
qingyuyang/Fetal_Planes_DB_E6
2023-09-02T06:07:09.000Z
[ "license:openrail", "region:us" ]
qingyuyang
null
null
null
0
0
--- license: openrail configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': Trans-cerebellum '1': Trans-thalamic '2': Trans-ventricular splits: - name: train num_bytes: 427622548.144 num_examples: 1972 download_size: 413633435 dataset_size: 427622548.144 ---
TahmidH/GrammarNSpell
2023-09-04T02:07:39.000Z
[ "language:bn", "license:cc0-1.0", "region:us" ]
TahmidH
null
null
null
0
0
--- license: cc0-1.0 language: - bn pretty_name: h ---
Sneka/finetune-3.5
2023-09-02T06:02:45.000Z
[ "region:us" ]
Sneka
null
null
null
0
0
Entry not found
Slichi/universe
2023-09-02T06:15:37.000Z
[ "license:openrail", "region:us" ]
Slichi
null
null
null
0
0
--- license: openrail ---
nampdn-ai/pile-1
2023-09-02T06:19:56.000Z
[ "region:us" ]
nampdn-ai
null
null
null
0
0
Entry not found
menorescueupdate/WellMeMenoRescue
2023-09-02T06:43:35.000Z
[ "region:us" ]
menorescueupdate
null
null
null
0
0
<h1 style="text-align: justify;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➤➤MenoRescue &ndash; Official Website Link &ndash; Click Here</strong></a></span></h1> <p style="text-align: justify;"><strong>➤➤ Product Name - <a href="https://cinnachromareviews.blogspot.com/2023/09/menorescue-menstrual-supporter.html">MenoRescue</a><br /></strong></p> <p style="text-align: justify;"><strong>➤➤ Quantity Per Bottle - 60 Capsules/Jar<br /></strong></p> <p style="text-align: justify;"><strong>➤➤ Category - Menstrual Supporter<br /></strong></p> <p style="text-align: justify;"><strong>➤➤ Compostion - Natural Components Only</strong></p> <p style="text-align: justify;"><strong>➤➤ Results - In Few Days</strong></p> <p style="text-align: justify;"><strong>➤➤ Availability &amp; Price &ndash; Visit Official Website <span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue">www.MenoRescue.com</a></span></strong></p> <p style="text-align: justify;"><strong>➤➤ Rating: - 4.8/5.0 ★★★★☆</strong></p> <h3 style="text-align: justify;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue">✅<strong>Click Here To Visit &ndash; OFFICIAL WEBSITE</strong>✅</a></span></h3> <h3 style="text-align: justify;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue">✅<strong>Click Here To Visit &ndash; OFFICIAL WEBSITE</strong>✅</a></span></h3> <h3 style="text-align: justify;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" 
href="https://www.healthsupplement24x7.com/get-menorescue">✅<strong>Click Here To Visit &ndash; OFFICIAL WEBSITE</strong>✅</a></span></h3> <p style="text-align: justify;"><strong>Short review on <a href="https://menorescue-official.blogspot.com/2023/09/menorescue-menstrual-supporter.html">MenoRescue</a> :</strong> Are you looking for a reliable supplement to combat painstaking menopause symptoms? <a href="https://sites.google.com/view/well-me-menorescue-/home">MenoRescue</a> may help! Enriched with potent and science-baked natural ingredients, this easy-to-consume dietary supplement may help you achieve improved holistic help. Read on and find out the reasons to invest in <a href="https://colab.research.google.com/drive/1gmCHzOSWptBU-5tNZrJ41p6A81KDSL8A">MenoRescue</a>!</p> <h2 style="text-align: center;"><span style="background-color: white; font-weight: normal;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEjuOzpHuov5NBfJ3S2-ldBLfwcKwI4kYFnXZoy59WA_U74jPHHEuQOP_ZBc54G_HNSKJaoutrXdvlOwfwnskoExBReI0dykkqDuBMo8exc7sxc-5gFKQQZ5VbgTe_tvChVocWvHkXszKNwLt-ZcFiJHqRukDWLdYE2f9m155sLGZFvMj9j4rOA1XWEo5HYi/w640-h622/WellMe%20MenoRescue%2010.png" alt="" width="640" height="622" border="0" data-original-height="545" data-original-width="560" /></a></span></h2> <p style="text-align: justify;"><a href="https://groups.google.com/g/well-me-menorescue-official/c/oMyhlkvNG9w">MenoRescue</a> is a premium hormone booster blend. This dietary supplement aims to combat the challenging side effects of menopause. This organic supplement claims to restore the essential female hormones to the optimal level, helping you to sleep better and feel calm. In addition, it&rsquo;s expected to address the root cause of hormonal fluctuations. 
Consequently, you may achieve improved overall health.</p> <h2 style="text-align: center;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➦➦Visit The Official Website To Get MenoRescue Now!</strong></a></span></h2> <h2 style="text-align: justify;"><strong>What is Well Me MenoRescue&trade;?</strong></h2> <p style="text-align: justify;"><a href="https://lookerstudio.google.com/reporting/0b962f7c-632b-45a7-9a38-a630b7f8455e">MenoRescue</a>&trade; is a dietary supplement designed to help women overcome the negative symptoms of menopause. As the name implies, this all-natural supplement works to balance hormones so that women can get back to sleep, feel calm and in control of their emotions, and think clearly. In fact, it is said that the current approach gets rid of the underlying cause for hormonal fluctuations. By doing so, other aspects of health might be improved as well. Let&rsquo;s pause for a moment to consider the origins of these uncomfortable symptoms before moving on to the positives.</p> <h2 style="text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEig-t7P6hu5DzM6QoEj9WZ2SiANrqy0WJ7ucL9pEXF0fkUEZOs3lJ1KyCQ1_5IYU-WoRIJAwexnzc4zWV_jkpEAiJ8Lfz5sKc-RqblPVvxhhwN_YWVGA5CwxksnuKlsQ1FfsrEsNyMxukNr6ujVVnsUtpAylPIK2YJzT-hW5WjXCy-hEr9F4KSd9dvUDq_2/w640-h630/WellMe%20MenoRescue%2013.jpg" alt="" width="640" height="630" border="0" data-original-height="1102" data-original-width="1118" /></a></h2> <h2 style="text-align: justify;"><strong>How does Well Me MenoRescue&trade; work?</strong></h2> <p style="text-align: justify;">Health professionals have long claimed that low levels of oestrogen and progesterone are to blame for menopause&rsquo;s debilitating side effects. 
However, recent findings point to a different perspective that would make it impossible for the aforementioned pair to travel together. To be more specific, it appears that the stress hormone cortisol is the primary culprit. Cortisol is thought to improve memory, the immune system, and assist control blood pressure in the appropriate quantities.</p> <p style="text-align: justify;">When cortisol levels exceed normal ranges, the &ldquo;fight-or-flight&rdquo; reaction is triggered. Unfortunately, the brain struggles to distinguish between real threats and everyday stressors during trying moments. And so, it tells the body to continually release cortisol. Of course, stopping the production of estrogen and progesterone is the opportunity cost of all of this. Worst of all, cortisol influences how fat is distributed and, if it is present in excess, may encourage fat storage, namely in the belly.</p> <h2 style="text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiBFpFAifJdSMl9K0IgBYQUyEc5yCx4A5aHS9QWCEj91yeFXdKbfFlEuUuy4gpyCygYBkHd3pxVydZK3T8_qN5qHnJHEiFmMV2XkB6WmQj0Hp_IqVO0kBgP_YA3WBVhnhIA8givXukuUdagagik4JyhsTC5N48u5ANytl7nr4x3oWY83UR0iOFHR_Y9f65V/w640-h232/WellMe%20MenoRescue%202.png" alt="" width="640" height="232" border="0" data-original-height="628" data-original-width="1735" /></a></h2> <p style="text-align: justify;">Ultimately, <a href="https://menorescue-updates.clubeo.com/calendar/2023/09/01/menorescue-official-reviews-2023-price-updates">MenoRescue</a>&trade; has been formulated with 9 rare and exotic ingredients thought to reduce excess cortisol levels, protecting women from frequent hot flashes, mood swings, hunger pangs, and fat gain, among other symptoms. 
Now that we have a broad idea of how <a href="https://menorescue-updates.clubeo.com/page/well-me-menorescue-is-it-real-or-fake-read-article-2023.html">MenoRescue</a>&trade; is supposed to function, let&rsquo;s explore its contents in more detail.</p> <h2 style="text-align: justify;"><strong>Ingredints in Well Me MenoRescue.</strong></h2> <p style="text-align: justify;"><strong>Greenselect&reg; Phytosome:</strong> This caffeine-free green tea formula is said to have antioxidant properties, helping your body to fight oxidative stress. Its green tea component may help lower BMI, facilitating healthy weight loss.</p> <p style="text-align: justify;"><strong>Schisandra berry extract:</strong> These sweet and salty fruits boast a sour and pungent flavor. They contain a substance that offers different therapeutic benefits like stress reduction and anxiety control. Besides, it may help comfort menopausal&rsquo;s symptoms like palpitation, hot flashes, excessive sweating, etc.</p> <h2 style="text-align: center;"><span style="background-color: white; font-weight: normal;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgi2Pj0LWDz9bHCt0bFoVRGd74kDbjI5GN5F6-xa3j7hzXqTY1Xd8yqnNxmhmnJV9wCSHPodAjccwLThBHX96-FzvbgFNikWsvNJNxVA2GF7rd9HPiM4SgR2E2lk7Ozan9YkImsOExhRo9UWePhonAAT_MVC_rlqYrjzrymHzffzQI8FrjF05yULZP7uTzi/w640-h314/WellMe%20MenoRescue%203.png" alt="" width="640" height="314" border="0" data-original-height="687" data-original-width="1400" /></a></span></h2> <p style="text-align: justify;"><strong>Black cohosh powder:</strong> The manufacturer of MenoResuce sources this ingredient from a specific flowering plant. Traditional medicines use this ingredient as a phytoestrogen &ndash; a substance mimicking the actions of natural estrogen hormones. 
This herb may help boost fertility and restore hormonal balance.</p> <p style="text-align: justify;"><strong>Black pepper extract:</strong> Though black pepper extract is not outwardly connected with estrogen-boosting functions, it may help boost the bioavailability of the other ingredients in <a href="https://menorescue-updates.clubeo.com/page/well-me-menorescue-exclusive-sale-all-discounts-available-for-short-period.html">MenoRescue</a>.</p> <p style="text-align: justify;"><strong>Sensoril&reg;:</strong> This trademarked ashwagandha extract comes from the leaves of the Withania somnifera plant. Different studies suggest that Sensoril&reg; may effectively bring down stress. Besides, it may help boost immunity.</p> <h2 style="text-align: center;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➦➦Visit the Official Website Today and Grab Your Bottle!</strong></a></span></h2> <h2 style="text-align: justify;"><strong>Key benefits of Well Me MenoRescue.</strong></h2> <p style="text-align: justify;"><a href="https://menorescue-updates.clubeo.com">MenoRescue</a> deals with the menopausal misery of women by working on the following:</p> <ul style="text-align: justify;"> <li><strong>Cortisol Levels :</strong> Sensoril&reg; is a multi-purpose ingredient in this supplement. It is a patented form of the ancient herb Ashwagandha which is popular in the medical field for its unique ability to promote healthy cortisol levels. Several peer-reviewed clinical trials support its efficacy in managing cortisol levels. Green Tea is another ingredient that helps in this.</li> <li><strong>Mood Swings :</strong> The entire combination of ingredients used in its making is such that reduced stress and depression are highly likely outcomes. 
For instance, Rhodiola powder is thought to help the body deal with stress better.</li> <li><strong>Hot Flashes :</strong> Red clover is a traditional medicine used to cure menopausal symptoms. Hot flashes and night sweats are one of them. There is a shortage of scientific research on the potential benefits of red clover. However, ancient medical sciences used it for treating hot flashes.</li> </ul> <h2 style="text-align: center;"><span style="background-color: white; font-weight: normal;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEjctyDoAJUUVhejcYIRqn4jsQ8GJRjH815NMIEUo5iEll8-vRJGpcF7CoTJ1Yqyr1iELBzEFGRrr-pVid-8F9-MvDvfMrWo4QbNHsB17Y7eENrEYWQcLeKlydFlzc0VGC3-oYunqvmHvtZXQbM2EWfiZeT7TK7I2ce3VISJs5RBDDxReckTEPAhEKAvFDcN/w640-h268/WellMe%20MenoRescue%206.jpg" alt="" width="640" height="268" border="0" data-original-height="738" data-original-width="1764" /></a></span></h2> <ul style="text-align: justify;"> <li><strong>Restful Sleep :</strong> As claimed, it treats hormonal imbalance, thus making cortisol levels healthy. A peaceful good night&rsquo;s sleep is a natural consequence.</li> <li><strong>Joint Flexibility :</strong> According to the manufacturer, this dietary supplement may help promote joint health and comfort. Further, it may help reduce muscle soreness and improve joint blood circulation.</li> <li><strong>Energy Levels :</strong> Hormones playing a roller coaster could mean havoc on energy levels. One feels tired, and all-day fatigue is common. MenoRescue may help regulate energy levels by giving your body an energy boost.</li> <li><strong>Excess Body Fat :</strong> No pill or juice is a magical remedy for fat burn. Various studies and clinical trials have found Greenselect Phytosome to be a highly safe and effective tool for reducing fat and shrinking that extra belly. 
Moreover, it&rsquo;s a caffeine-devoid green tea extract. Hence MenoRescue may help in not only weight loss but also firing up the metabolism.</li> </ul> <h2 style="text-align: center;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➦➦Get your&nbsp; MenoRescue Here &amp; Get Great Discount!</strong></a></span></h2> <h2 style="text-align: justify;"><strong>Pros of Well Me MenoRescue.<br /></strong></h2> <ul style="text-align: justify;"> <li><strong>Research-backed dietary supplement.</strong></li> <li>A blend of naturally occurring ingredients.</li> <li><strong>Vegetarian and vegan-friendly.</strong></li> <li>Gentle on the body.</li> <li><strong>&ldquo;Guaranteed pure,&rdquo; i.e., no GMO and BPA.</strong></li> <li>Easy to take.</li> <li><strong>A complete package for all-menopause issues.</strong></li> <li>State-of-the-art manufacturing.</li> <li><strong>Follows current good manufacturing practices (cGMP).</strong></li> </ul> <h2 style="text-align: justify;"><strong>Cons of Well Me MenoRescue.<br /></strong></h2> <ul style="text-align: justify;"> <li><strong>It&rsquo;s not a quick remedy.</strong></li> <li>Comes at a premium price (when discounts and offers aren&rsquo;t there).</li> <li><strong>Some ingredients have limited research support as they&rsquo;re ancient by nature.</strong></li> </ul> <h2 style="text-align: justify;"><strong>Dosage recommended of Well Me MenoRescue by experts.</strong></h2> <h2 style="text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEh0tyKSmMKwvBxPeAu5nDCT1O3QLbN7K3H5Mq7tMZ16qy-1BQeuTr2CPV8FlX4BIOnGRTSWEc9Ra20xK8sgd6WluMeQ_4y_xjZ24atfcIelWrKB2K1tNm7VZMnhqlcyDdcSwacbeXyAsY0nJ9TLb6bcq68lycSVBk2QTgopXguISkd2zk5QYScab8FvGEEj/w640-h480/WellMe%20MenoRescue%209.png" alt="" 
width="640" height="480" border="0" data-original-height="1050" data-original-width="1400" /></a></h2> <p style="text-align: justify;">Each bottle of MenoRescue contains 60 capsules. According to the manufacturer&rsquo;s recommendation, two capsules daily after breakfast is the suitable dosage. Thus, one bottle is sufficient for 30 days.This supplement is vegan-friendly and free from dairy, gluten, nuts, eggs, soy, sugar, and crustaceans. Hence, it claims that even women allergic to these can take the tablets without any worries.Moreover, manufacturers say it is free from GMOs (genetically modified organisms) and BPA (Bisphenol-A), making it safer to consume.</p> <h2 style="text-align: justify;"><strong>Detailed pricing of Well Me MenoRescue.</strong></h2> <p style="text-align: justify;">MenoRescue is only available on the manufacturer&rsquo;s official website for buying purposes. The supplement comes in a standard one-month supply. Besides, there is a special bundle plan consisting of two different packages. These are for 3 and 6 months, respectively. Stocking up is not a concern as the product has a shelf life of two years. Thus, it is easy to tailor and personalize one&rsquo;s order.</p> <p style="text-align: justify;">MenoRescue&trade; is currently available at three different pricing levels. 
Specifically:</p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEjBa5DNie2Tgsm72HoiOyiiTKOUJDrZVD4X_WzYnU_d-24bCJgrk1T89gTFHELB8FmerpbYVazJrmnoXwae2LBp3wx-iHAL7t4U30Zpy-6nIqodIK-uIDLltQB2-S3Gqpnl9upKoIMTsh41p5V9lAYzrcOIB8KF3GYi8P_xaqzSxnTWEEypG3GORvBjOQOO/w640-h376/31103531_web1_M3-KIR-20221123MenoRescue-Pricing-per-Bottle.jpg" alt="" width="640" height="376" border="0" data-original-height="750" data-original-width="1280" /></a></div> <ul style="text-align: justify;"> <li><strong>Sample Package ➣➣ 1 MenoRescue&trade; bottle: $59 each + USD 9.95 USA shipping.</strong></li> <li><strong>Most Popular ➣➣ 3 MenoRescue&trade; bottles: $49 each + USD 9.95 USA shipping.</strong></li> <li><strong><span style="background-color: #339966; color: white;">Best Value ➣➣6 MenoRescue&trade; bottles: $39 each + free USA shipping.</span>&nbsp;<span style="color: green;">✔✔</span></strong></li> </ul> <h2 style="text-align: center;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➦➦Order Here Your MenoRescue &amp; Grab The Big Discount Right Now!</strong></a></span></h2> <h2 style="text-align: justify;"><strong>Is there any Money Back Guarantee on Well Me MenoRescue.</strong></h2> <p style="text-align: justify;">Yes, a 180-day money-back guarantee is in place for MenoRescue&trade;. This guarantee will be valid regardless of the bundle chosen at checkout and whether the bottles are used or unopened. To qualify, people need to get in touch with customer support within the specified time.</p> <h2 style="text-align: justify;"><strong>FAQ's on Well Me MenoRescue.</strong></h2> <p style="text-align: justify;"><strong>Q. 
What makes Well Me MenoRescue unique?</strong></p> <p style="text-align: justify;"><strong>Ans.</strong> Common menopause remedies focus on the traditional approach of augmenting the declining levels of estrogen and progesterone. Interestingly, scientific researchers have recently found the root cause of menopause struggles in the daily cortisol spikes. MenoRescue, with its advanced hormone support formula, focuses on achieving healthy cortisol levels. This, in turn, leads to a range of other menopausal health benefits.</p> <p style="text-align: justify;"><strong>Q. Can women with food allergies take Well Me MenoRescue?</strong></p> <p style="text-align: justify;"><strong>Ans.</strong> The good news is that MenoRescue is dairy and gluten-free! Not just this, but there is no use of sugar, soy, nuts, eggs, or crustaceans in the manufacturing process. Furthermore, the ingredients are purely herbal and vegan-friendly. The manufacturers intend to ensure that more and more women across the globe can reap the benefits of this supplement. Thus, even women allergic to the foods mentioned above can take it.</p> <p style="text-align: justify;"><strong>Q.How many bottles should be ordered?</strong></p> <p style="text-align: justify;"><strong>Ans.</strong> For beginners, their standard one-month plan serves the purpose. However, the body needs to absorb the ingredients in any nutritional supplement to show results. This requires some time and consistency in consumption. Thus, it might be preferable to go for their special bundle plan with 3 and 6 months packages to get real transformation. Additional savings work like the cherry on top.</p> <h2 style="text-align: justify;"><strong>Final words on Well Me MenoRescue.</strong></h2> <p style="text-align: justify;">Menopause can be a distressing experience for women. Although it affects every woman differently, certain physical, mental and emotional changes are common. 
Given this phase&rsquo;s challenge, a risk-free solution is incredibly desirable.Therefore, MenoRescue is a dietary supplement formula designed to change the misery of menopause symptoms into comfort for women. It&rsquo;s based on the modern finding of science that excessive cortisol levels can disturb the hormonal balance. This, in turn, results in more severe symptoms.This supplement has exhaustive and in-depth research behind its manufacturing. Some lesser-known yet age-old natural herbs and extracts are used as their ingredients. Moreover, each ingredient has multiple advantages of its own.</p> <h2 style="text-align: center;"><span style="background-color: #993300; color: white;"><a style="background-color: #993300; color: white;" href="https://www.healthsupplement24x7.com/get-menorescue"><strong>➦➦Just Click Here To Visit the Official Website &amp; Buy MenoRescue!</strong></a></span></h2> <p><a href="https://cinnachromareviews.blogspot.com/2023/09/menorescue-menstrual-supporter.html">https://cinnachromareviews.blogspot.com/2023/09/menorescue-menstrual-supporter.html</a></p> <p><a href="https://menorescue-official.blogspot.com/2023/09/menorescue-menstrual-supporter.html">https://menorescue-official.blogspot.com/2023/09/menorescue-menstrual-supporter.html</a></p> <p><a href="https://sites.google.com/view/well-me-menorescue-/home">https://sites.google.com/view/well-me-menorescue-/home</a></p> <p><a href="https://colab.research.google.com/drive/1gmCHzOSWptBU-5tNZrJ41p6A81KDSL8A">https://colab.research.google.com/drive/1gmCHzOSWptBU-5tNZrJ41p6A81KDSL8A</a></p> <p><a href="https://groups.google.com/g/well-me-menorescue-official/c/oMyhlkvNG9w">https://groups.google.com/g/well-me-menorescue-official/c/oMyhlkvNG9w</a></p> <p><a href="https://lookerstudio.google.com/reporting/0b962f7c-632b-45a7-9a38-a630b7f8455e">https://lookerstudio.google.com/reporting/0b962f7c-632b-45a7-9a38-a630b7f8455e</a></p> <p><a 
href="https://menorescue-updates.clubeo.com">https://menorescue-updates.clubeo.com</a></p>
dariolopez/Llama-2-oasst1-es-test-format
2023-09-02T06:58:25.000Z
[ "region:us" ]
dariolopez
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 4680420 num_examples: 3909 download_size: 2537064 dataset_size: 4680420 --- # Dataset Card for "Llama-2-oasst1-es-test-format" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
polymath707/indollama2-without-emoji
2023-09-02T07:03:24.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
wellmemenorescue/MenoRescue
2023-09-02T07:13:45.000Z
[ "region:us" ]
wellmemenorescue
null
null
null
0
0
<h1 style="text-align: left;">WellMe MenoRescue</h1> <p><span style="font-family: georgia;"><strong>&bull; Product Name - MenoRescue<br /></strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Side Effects - No Side Effects (100% Natural)</strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Main Benefits - Fix Menopause &amp; Hormonal Balance<br /></strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Category - Menopause Supplements<br /></strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Results - In 2-3 weeks</strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Availability - Online</strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Customer Reviews -&nbsp; ★★★★✰ 4.9/5</strong></span></p> <p><span style="font-family: georgia;"><strong>&bull; Price - Visit <a href="https://www.healthsupplement24x7.com/get-menorescue">Official Website</a></strong></span></p> <h3 style="text-align: center;"><span style="font-family: georgia;"><strong><span style="color: red;"><span style="background-color: #ffe599;"><a href="https://www.healthsupplement24x7.com/get-menorescue">Get Huge Discount Now!!</a></span></span></strong></span></h3> <h3 style="text-align: center;"><a href="https://www.healthsupplement24x7.com/get-menorescue"><strong><span style="font-family: georgia;"><span style="background-color: #fff2cc;"><span style="color: red;">Special Discount- As Low As On MenoRescue &ndash; Get Your Best Discount Online Hurry!!</span></span></span></strong></a></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img 
src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEizZi1vJXISJmyvl4CLTYKpk8wJoBdqstfv7N-6-1Nu6rwJRRbTK1JveYVDfm5iBZJRSh4OEQrksR-yR-mRnnMqlk2Op1eDVemygzUzDLBWPFXDBUiwOGg6ZjT7rawAtmhHWl43PmB814lW_5h5cY4owckTXlle1ehnDk0FeHUnpjI8XE3DWE_3k2lcVniF/w640-h402/WellMe%20MenoRescue%201.jpg" alt="" width="640" height="402" border="0" data-original-height="815" data-original-width="1296" /></a></div> <p><a href="https://www.podcasts.com/menorescue">MenoRescue</a> is a menopause relief supplement from WellMe designed to help you escape the menopause roller coaster.</p> <p>By taking two capsules of the doctor-formulated supplement daily, you can purportedly support healthy hormone levels using a blend of handpicked ingredients based on Ivy League research.</p> <p>Does MenoRescue really help with menopause? How does MenoRescue work? Keep reading to find out everything you need to know about WellMe&rsquo;s MenoRescue and its effects.</p> <h2 style="text-align: center;"><span style="font-family: georgia;"><strong><span style="background-color: #b6d7a8;"><span style="color: red;">!<a href="https://www.healthsupplement24x7.com/get-menorescue">!! HURRY UP !!</a></span></span></strong></span></h2> <h2 style="text-align: center;"><span style="font-family: georgia;"><strong><a href="https://www.healthsupplement24x7.com/get-menorescue">GET <span style="color: red;">SPECIAL OFFER</span> THIS WEEK SALE</a></strong></span></h2> <h2 style="text-align: left;"><strong>What is <a href="https://sites.google.com/view/wellme-menorescue-reviews/">MenoRescue</a>?</strong></h2> <p>Women endure different hormonal changes in their life, starting with puberty. While everyone goes through this initial change, women then face challenges for every month of their reproductive lives as their menstrual cycle repeats. This cycle continues to change hormones even further when they become pregnant, dropping drastically after the baby is born. 
The final stage of this hormonal change is menopause.</p> <p>Menopause is hard for any woman to handle, and symptoms can start years before the actual change. Unfortunately, this process is quite tricky to operate as the transition changes everything they are used to.</p> <p><a href="https://menorescue.clubeo.com/page/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancing-spam-or-legit.html">MenoRescue</a> offers the chance to stop going through all of the pain and discomfort that most people think is synonymous with aging. Using natural ingredients, this formula helps consumers to deal with common problems like hot flashes, night sweats, and mood swings. While many of these problems arise because of the loss of estrogen production, MenoRescue helps consumers to improve their hormone levels. It even allows consumers to reduce the weight gain that can come with menopause.</p> <p>While menopausal women struggle to get a good night's sleep, using MenoRescue helps ease the stress. Instead of waking constantly, consumers can relax and feel refreshed in the morning. This formula is made to help consumers feel better and enjoy their life more, rather than enduring the constant struggle that menopause creates.</p> <p>MenoRescue is not exclusively about getting relief from the typical menopause symptoms. Along with eliminating the adverse effects that interrupt daily living, consumers will find that their sex drive gets a substantial boost without losing estrogen. They also will find it easier to maintain and lose weight with the effects of proper estrogen production. 
By adding MenoRescue, consumers can revive the life they had before menopause to enjoy it once more.</p> <h3 style="text-align: left;"><span style="font-family: georgia;"><strong>Must See : <span style="background-color: #ffe599;"><span style="color: red;"><a href="https://www.healthsupplement24x7.com/get-menorescue">Visit the Official Site MenoRescue Discount Available Here</a></span></span></strong></span></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiGEKBmi07yAnlTVV1aUjh0k2oXjppoCMYwUwHD2XegZEsxv3pgdgj6bg5gsHFRcBwcyKTNWpHBIHq2xNt5ndEhL4pUlmmA-RnFhOKAxz_jfT-VNakt8h0tazDRktmCbp43Z8YUrTYmJfAjmUdEO2ORYT6R-x3VSNmG-NhGW2ObCkE_u7lnqyq7j3e0v2Gn/w479-h359/WellMe%20MenoRescue%209.png" alt="" width="479" height="359" border="0" data-original-height="1050" data-original-width="1400" /></a></div> <h2 style="text-align: left;"><strong>How <a href="https://www.eventcreate.com/e/wellme-menorescue">MenoRescue</a> Works?</strong></h2> <p><a href="https://wellme-menorescue.hashnode.dev/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancingspam-or-legit">MenoRescue</a> works by supporting healthy hormone levels.</p> <p>When going through menopause, your body is experiencing significant hormonal changes. Estrogen and progesterone levels plummet, and cortisol levels rise starting in your late 40s. This can lead to noticeable symptoms of menopause.</p> <p>MenoRescue is designed to help using natural ingredients. Each serving includes well-known ingredients like ashwagandha and green tea along with lesser-known ingredients like black cohosh and red clover.</p> <p>Instead of injecting yourself with hormones or undergoing invasive menopause relief solutions, you can take two capsules of MenoRescue daily. 
The supplement is designed to support healthy hormone balance while also helping with energy, fat burning, body temperature, mood, heart health, joint health, sleep, and more.</p> <h3 style="text-align: left;"><strong style="font-family: georgia;">Also Read: <span style="background-color: #ffe599; color: red;"><a href="https://www.healthsupplement24x7.com/get-menorescue">The Complete Report On MenoRescue Controversy That Will Blow Your Mind</a></span></strong></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgGVlIp8d6_BeHrY79S4c-ElVrIw44CTy-cJQHD4xuspl1bYeniBJqsDJ7b7CFDjYx--kPIjUTKzqfogA5kZcQp_jHztlrUGXLFzqcQaniDA9bKHPc4-cH-xeNkMrsAgLltbjVU7wjcbqotdVwuabLhApm-XzTLwUdMs_a-DtIvWXDjRYbJHtasdoA7axEF/w511-h275/WellMe%20MenoRescue%2015.png" alt="" width="511" height="275" border="0" data-original-height="751" data-original-width="1400" /></a></div> <h2 style="text-align: left;"><strong>How <a href="https://wellme-menorescue.mystrikingly.com/">MenoRescue</a> Promotes Hormone Balance?</strong></h2> <p><strong><a href="https://haitiliberte.com/advert/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancingspam-or-legit/">MenoRescue</a> promotes hormone balance via a two-step process:</strong></p> <p><strong>First, MenoRescue promotes healthy cortisol levels to encourage a healthy balance of estrogen and progesterone</strong></p> <p>Cortisol, the stress hormone, causes imbalances across the rest of your hormones. Unfortunately, unhealthy cortisol levels make problems even worse over time. Your estrogen and progesterone levels are already declining in your late 40s. Cortisol makes it worse, putting you on an estrogen and progesterone roller coaster . 
MenoRescue uses a blend of natural ingredients to promote healthy cortisol levels and help restore balance to your hormones.</p> <p><strong>Second, MenoRescue gently supports the body&rsquo;s production of estrogen and progesterone</strong></p> <p>After promoting healthy cortisol levels, MenoRescue can make it easier for your body to balance estrogen and progesterone on its own. To encourage your body further, MenoRescue contains a blend of ingredients that can help your body produce its own estrogen and progesterone, making it easier to restore overall balance.</p> <p>Because of these two effects, MenoRescue aims to provide a range of benefits &ndash; from defense against hot flashes to better fat burning and more.</p> <h3 style="text-align: center;"><strong><span style="background-color: #ffe599; color: red; font-family: georgia;"><a href="https://www.healthsupplement24x7.com/get-menorescue">Shop Smart, Save Big: Unleash the Power of Discounts</a></span></strong></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhqe0XhQGK57pWgsq0bEzi-JmpmB35NmGqzsnJHeYdcQWBnYNL3LkMQ5rUGLSpfD5aXEX3ndXkeZzdkjU-2RUVJ_ZLOf7ixmGWIz58NBlOHowesf1WojyVuQWeyQtHuvEdjWLaSWdaRmXrms5LyiaswLVgXia7iBnpZYnHUxArSQyJ3EvLpDbxfoKsYTwMC/w640-h232/WellMe%20MenoRescue%202.png" alt="" width="640" height="232" border="0" data-original-height="628" data-original-width="1735" /></a></div> <h2 style="text-align: left;"><strong>Ingredients Used In <a href="https://wellme-menorescue.jimdosite.com/">MenoRescue</a><br /></strong></h2> <p>MenoRescue contains a blend of plant, herbs, vitamins, minerals, nutrients, and other active ingredients linked to health and wellness.</p> <p><strong>Here are all of the active ingredients in MenoRescue and how they work, according to <a 
href="https://www.podcasts.com/menorescue/episode/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancingspam-or-legit">MenoRescue</a>:</strong></p> <h2 style="text-align: left;"><strong>Hormone Support Blend</strong></h2> <p>First, MenoRescue contains four ingredients to support the first phase of the formula: supporting your body&rsquo;s normal balance of cortisol. Cortisol, the stress hormone, is linked to weight gain, estrogen imbalance, and many menopause-like symptoms. High cortisol levels aren&rsquo;t good for you. MenoRescue contains four herbs and plants as part of its Hormone Support Blend, helping to support healthy cortisol levels:</p> <p><strong>Sensoril:</strong> One of the most important ingredients in MenoRescue is Sensoril, which is a proprietary type of ashwagandha designed for maximum purity and absorption. Sensoril contains a specific type of ashwagandha sourced from organic farms in India and Nepal. Multiple studies show taking 125mg of Sensoril &ndash; the same dose in MenoRescue &ndash; is associated with better control of cortisol and greater overall stress response. In fact, many people take ashwagandha daily on its own for stress relief, and ashwagandha is known for its adaptogenic effects. According to WellMe, the Sensoril in MenoRescue can boost energy, regulate mood, aid concentration, support restful sleep, increase muscle strength and stamina, reduce muscle soreness, support heart health, and promote joint pain relief, among other benefits.</p> <p><strong>Greenselect Phytosome:</strong> MenoRescue contains an ingredient called Greenselect Phytosome. It&rsquo;s a combination of decaffeinated green tea extract with a unique &ldquo;phytosome&rdquo; technology to boost absorption. Green tea is widely believed to be one of the world&rsquo;s healthiest beverages. It&rsquo;s rich with antioxidants linked to energy, weight loss, and overall health and wellness. 
According to WellMe, citing several clinical trials, green tea can also promote healthy cortisol levels and help you lose the dreaded &ldquo;meno-belly.&rdquo;</p> <p><strong>Rhodiola Rosea:</strong> Rhodiola rosea, like ashwagandha (Sensoril) has a long history of use as an adaptogen in traditional medicine, including Ayurvedic medicine practiced across South Asia for centuries. Found in high altitude mountain ranges, Rhodiola rosea has been linked to a growing number of physical and cognitive effects. Studies show Rhodiola rosea can help with cognitive function, body temperature, energy, mood, muscle performance, circulation, joint comfort, and more.</p> <p><strong>Schisandra Berry:</strong> MenoRescue contains schisandra, which has a long history of use in traditional Chinese medicine, where it was used as a general wellness supplement. Today, we know schisandra berry likely works because it&rsquo;s rich with vitamin C, one of nature&rsquo;s best antioxidants. According to WellMe, schisandra is linked to specific menopause-fighting effects, including cognitive health, memory, mood, energy, muscle strength, and relief from hot flashes and night sweats, among other effects.</p> <h3 style="text-align: left;"><strong style="font-family: georgia;"><a href="https://www.healthsupplement24x7.com/get-menorescue">ALERT! <span style="background-color: #ffe599; color: red;">Also, Know the Effects of Ingredients Before Consuming them in Your Daily Routine!</span></a></strong></h3> <h2 style="text-align: left;"><strong>Hormone Booster Blend</strong></h2> <p>The other ingredients in <a href="https://menorescue.clubeo.com/calendar/2023/09/01/menorescue-hormone-booster-blend-stay-energized-and-focused-menopause-supplements-you-need-work-or-hoax">MenoRescue</a> are part of the Hormone Booster Blend. They&rsquo;re designed to boost levels of estrogen and progesterone. 
After the first four ingredients support healthy cortisol levels, the next ingredients boost hormones like estrogen and progesterone, helping you target symptoms of estrogen. This blend includes, according to WellMe, &ldquo;the world&rsquo;s purest and most potent phytoestrogens.&rdquo; Phytoestrogens are plant-based chemicals that act similar to the estrogen produced by your body. You can boost estrogen without resorting to invasive solutions &ndash; like hormone injections. The five ingredients in MenoRescue&rsquo;s Hormone Booster Blend include:</p> <p><strong>Sage Leaf:</strong> Sage leaf is an evergreen shrub and member of the mint family believed to support estrogen and progesterone levels. One 8 week study showed 300mg of sage leaf, the same dose in MenoRescue, promoted a healthy body temperature in a group of menopausal women. Sage leaf has a long history of use by traditional medicine practitioners around the world for a variety of ailments &ndash; including menopause.</p> <p><strong>Red Clover:</strong> Red clover is native to parts of Europe, western Asia, and Africa. The flowering plant is rich with natural chemicals called isoflavones. Studies show isoflavones have a similar chemical structure to estrogen. They&rsquo;re phytoestrogens that could help your body promote estrogen production without side effects. One 12-week study found 80mg of red clover &ndash; the same dose used in MenoRescue &ndash; led to a &ldquo;remarkable difference&rdquo; in the health and happiness of a group of menopausal women. Those women also reported a 50% reduction in menopause symptoms based on the Official Menopause Rating Scale (MRS), a set of 11 common menopause symptoms.</p> <p><strong>Black Cohosh:</strong> Black cohosh is native to North America, and indigenous people throughout the United States and Canada have used black cohosh for medicinal purposes for centuries. 
Today, growing research has connected black cohosh to menopause relief, including energy, mood, body temperature, sleep, and sex drive benefits, among other perks.</p> <p><strong>Chasteberry:</strong> Chasteberry, also known as vitex, Abraham&rsquo;s balm, or monk&rsquo;s pepper, is found in many menopause relief supplements because of its connection to progesterone. Some studies show chasteberry can support healthy progesterone levels, which could help with certain menopause symptoms. One 8 week study, for example, found a group of women experienced &ldquo;significantly lower&rdquo; intensity symptoms of menopause when taking chasteberry compared to a group of women taking a placebo.</p> <p><strong>BioPerine:</strong> BioPerine, or black pepper extract, can boost the absorption of other active ingredients in the formula. Many supplements contain black pepper extract specifically for absorption. Some studies have shown black pepper can boost the absorption of natural ingredients by up to 2,000%. That means fewer wasted ingredients &ndash; and maximum effectiveness for the formula you&rsquo;re paying for within MenoRescue.</p> <p>WellMe has packaged all of these ingredients into a veggie capsule. 
You take two capsules daily for menopause relief.</p> <h3 style="text-align: left;"><a href="https://www.healthsupplement24x7.com/get-menorescue"><span style="background-color: #ffe599;"><span style="color: red;"><span style="font-family: georgia;"><strong>To Learn More about Premium MenoRescue Ingredients in Detail, Click Here to Head to Its Official Website</strong></span></span></span></a></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhZEZg50f7WUZ5-sDdbafZFIXUJH943yHDFF0G13xCVJpUrAci7jysfr9xV3RU02VcHqPlrl9M6GQtx4S60UarEhyGP_TiPn1u3K6_Vl5dEMObQRo2YcAKhJCInLhxa_IB5alSYARbnFq-EHGXtaq85LlW47_FYQnkC4mZsKEbFOHzpL7oyb85MhA4NGHCr/w640-h314/WellMe%20MenoRescue%203.png" alt="" width="640" height="314" border="0" data-original-height="687" data-original-width="1400" /></a></div> <h2 style="text-align: left;"><strong>Life-Changing Benefits From <a href="https://wellme-menorescue-reviews.company.site/">MenoRescue</a> Supplement</strong></h2> <p>The effects of <a href="https://soundcloud.com/wellme-menorescue/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancing">MenoRescue</a> can be life-changing for many women. When you are in menopause, you may notice that the symptoms you experience get in the way of life. 
Hot flashes, brain fog, night sweats, and several other symptoms can make it difficult to get a good night&rsquo;s sleep, while also interfering with your ability to focus on work and do your daily chores.</p> <p><strong>Here are some of the most important benefits that you can expect from MenoRescue:</strong></p> <p>The supplement doesn&rsquo;t raise the risk of heart disease and other complications that have been associated with hormone replacement therapy.</p> <p>A specialized formula targets the causes of menopause and curbs the symptoms effectively.</p> <p>Hormonal balance improves, which helps to ensure there isn&rsquo;t a sudden drop in your estrogen levels.</p> <p>Most women experience noticeable effects when they have taken MenoRescue for as little as seven days.</p> <p>All of the ingredients in the supplement have gone through extensive clinical testing and studies.</p> <p>Another important factor to consider is that MenoRescue is great for all phases of menopause. You can start to use the supplement prior to menopause and continue to take the capsules even in the postmenopausal stage.</p> <h3 style="text-align: left;"><strong style="font-family: georgia;">Must See this Report: <a href="https://www.healthsupplement24x7.com/get-menorescue"><span style="background-color: #ffe599; color: red;">How Does the MenoRescue Formula Works? 
This May Change Your Mind!</span></a></strong></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgMTQo2VIGITLbtBqhz0cUbo_xonRQAafea8Q5HJy7N6nQ4P7olA5PtjHrtd4XmgrViQDWywAzZ8Nx7SXzNp-5afB4mnDb2Ard5DlfHKg8rqOn7z1f21vhRBDUJvPNiM72WwVuFyuX_5FX2AjfsxWe8gM4lQtWJwudR_pPmFqYHKmMkbjv7bjMXv6Nsuac4/w640-h268/WellMe%20MenoRescue%206.jpg" alt="" width="640" height="268" border="0" data-original-height="738" data-original-width="1764" /></a></div> <h2 style="text-align: left;"><strong>Are There Any Side Effects Of Using <a href="https://wellme-menorescue.webflow.io/">WellMe MenoRescue</a>?</strong></h2> <p><a href="https://erectin-usa-news.clubeo.com/calendar/2023/09/01/wellme-menorescue-menopause-supplement-get-rid-from-hot-flashes-and-improve-focus-and-energy-levels-spam-or-legit">MenoRescue</a> is a safe supplement that is tested and approved. Many women who have used this product for a long time have not reported any side effect. If you use the formula in the right way, you should only expect positive results with zero side effects. 
This makes it one of the best supplements on the market to handle menopause symptoms.</p> <h3 style="text-align: center;"><strong><span style="background-color: #ffe599; color: red; font-family: georgia;"><a href="https://www.healthsupplement24x7.com/get-menorescue">"Limited Time, Maximum Savings Grab Your Discount Today!"</a></span></strong></h3> <h2 style="text-align: left;"><strong>Advantages of <a href="https://lookerstudio.google.com/reporting/476cf65a-af28-4bd5-affe-7e6866bb9863">WellMe MenoRescue</a>:</strong></h2> <p><strong>Symptom Relief:</strong> Menopause supplements can alleviate common symptoms like hot flashes, mood swings, and sleep disturbances.</p> <p><strong>Hormonal Balance:</strong> They may help rebalance hormone levels, reducing the impact of hormonal fluctuations.</p> <p><strong>Bone Health:</strong>&nbsp;WellMe MenoRescue contain calcium and vitamin D, promoting bone health and reducing the risk of osteoporosis.</p> <p><strong>Heart Health:</strong> WellMe MenoRescues can support cardiovascular health during the menopausal transition.</p> <p><strong>Convenience:</strong> Supplements are easy to incorporate into a daily routine, providing a consistent source of essential nutrients.</p> <p><strong>Natural Ingredients:</strong> WellMe MenoRescue contain natural ingredients, offering a holistic approach to symptom management.</p> <h2 style="text-align: left;"><strong>Disadvantages of <a href="https://colab.research.google.com/drive/13p4smYwI-nHOHAh8oMEykz17A9gTFinl?usp=sharing">WellMe MenoRescue</a>:</strong></h2> <p>&bull; Only available through the special promotional deal on the official online store.</p> <p>&bull; Requires four to six weeks of regular supplementation to see results.</p> <p>&bull; Limited stock; the pricing deal will expire eventually.</p> <p>&bull; Results may vary from person to person.</p> <h3 style="text-align: center;"><strong><span style="background-color: #ffe599; color: red; font-family: georgia;"><a 
href="https://www.healthsupplement24x7.com/get-menorescue">"More Value, Less Cost: Experience Our Unbeatable Discounts"</a></span></strong></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhXmBKcXXKPjR0XRX5nUE-psxlP5eNscZsJ43DpUcQkRk9b0YVFBXBZw49JmY5JqGh2zbdTnXxfnTzX960LOi1X2hA3VTABBHErGrjPTwGmdoKOWBG-zg6dwIrT8_NsNLhinyf-VldR9N_pRK-asth4kJPGw_i60pjISIY5Oh0N22UpZXKYs1LMIcr20x_O/w640-h319/WellMe%20MenoRescue%205.png" alt="" width="640" height="319" border="0" data-original-height="605" data-original-width="1221" /></a></div> <h2 style="text-align: left;"><strong>How To Take <a href="https://bitbucket.org/menorescue/menorescue/issues/1/wellme-menorescue-advanced-formula-to-fix">WellMe MenoRescue</a>?</strong></h2> <p>This is one of the easiest to use supplements for women. You need to take two tablets twice in a day. You need to take the tablets with a full glass of water. It is advisable to take the tablets 15 minutes before breakfast and around one hour before you sleep at night. Make sure not to use more than the recommended dose for better results.</p> <p>If you are experiencing hot flashes, painful monthly periods and many other menopause-related symptoms, you should see positive results just after ten days. However, the results vary from person to person. Many women are happy to report that hot flashes were gone just after using the product consistently for two weeks. Depending on your body and health, you should see positive results after one month of consistent use.</p> <h3 style="text-align: left;"><span style="font-family: georgia;"><strong>READ ALSO: <a href="https://www.healthsupplement24x7.com/get-menorescue"><span style="background-color: #ffe599;"><span style="color: red;">Does the MenoRescue Work For Everyone? 
Before you buy, read real customer reviews and testimonials!!</span></span></a></strong></span></h3> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgZoQIX0ZK-byC1mG6UftAPx5P_Uvximn9hZg95Coux-tQsLDykMghvABxuwS9Ef_xUn0rc_LINiymIt-mFGAbD8R86RUoMqQiYYwsZLP53Vnszb0E5JR38M35YhChIX8XWiI8wBhG178MlmORPDLdu-9zhmjvExU0oe4HAPm-jc6SdC_LIxl34vVqC_42M/w640-h240/WellMe%20MenoRescue%207.jpg" alt="" width="640" height="240" border="0" data-original-height="458" data-original-width="1220" /></a></div> <h2 style="text-align: left;"><strong>Where To Buy <a href="https://menorescue.bandcamp.com/track/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-and-hormonal-imalancing-spam-or-legit">WellMe MenoRescue</a>?</strong></h2> <p>MenoRescue is exclusively manufactured by WellMe. It&rsquo;s only available through this website&mdash;not in stores&mdash;which allows us to cut out the middleman and pass those savings onto you.</p> <p>To help you stock up, save more money, and achieve real, life-changing results we&rsquo;ve created a special bundle plan consisting of 3 and 6 month packages to go alongside the standard one month starter option.</p> <p>Your order will be shipped out right away and will arrive at your door within 5-7 business days (for U.S. locations). Orders placed outside the U.S. 
may take up to two weeks to arrive.</p> <p><strong>These are the MenoRescue costs which decline while getting more units simultaneously:</strong></p> <p><strong>Basic -</strong> 1 Bottle Supply of MenoRescue USD 59/bottle + SMALL SHIPPING.<span style="color: red;"><br /></span></p> <p><strong>Popular Pack -</strong> Buy 3 Get Bottle Supply of&nbsp;MenoRescue USD 49/bottle + FREE SHIPPING.</p> <p><strong>Best Value Pack - </strong>Buy 6 Bottle Supply of MenoRescue USD 39/bottle + FREE SHIPPING.</p> <p style="text-align: left;">MenoRescue Payments are made using 256-bit SSL technology to keep information safe and secure, and all orders arrive within a few business days of ordering.</p> <h3 style="text-align: left;"><strong style="font-family: georgia;">Special Offer: <span style="background-color: #fff2cc; color: red;"><a href="https://www.healthsupplement24x7.com/get-menorescue">Click Here To Get Heavy Discount Instantly!!</a></span></strong></h3> <p><span style="font-family: times;"><span style="font-size: medium;"><span style="color: red;">Good News: Get additional discount on shipping when you checkout with Mastercard or Discover card!</span></span></span></p> <div class="separator" style="clear: both; text-align: center;"> <p style="text-align: left;"><span style="font-size: medium;"><a style="clear: left; float: left; margin-bottom: 1em; margin-left: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/a/AVvXsEgJqDXBj2s2sKgxhjLGKnDNPxD392fUjUkF8lQbqbuoFZwPHnPE27muXA18Hs1EzbsUHHsPlOR9Njx119fwMPFiCrLv9NlRRfEUdLPeIVlqZmqjexv1dJ0pMoSO6VUtSY89rewM_LiPyGpkGpNCHHdprDSvrWyt6MprtcceNFal6bdDPK_FyvLHnQzy-A" alt="" width="110" height="120" border="0" data-original-height="120" data-original-width="110" /></a><span style="font-family: helvetica;"><span style="font-size: small;"><strong><span style="color: red;">APPROVED!</span><br /></strong></span></span></span></p> <p style="text-align: left;"><span 
style="font-family: helvetica;"><span style="font-size: small;">Limited supply available. We currently have product in stock and ready to ship within <span style="color: red;">24 hours</span>.</span></span></p> </div> <p><span style="font-family: helvetica;"><span style="font-size: small;"><strong><span style="color: red;">EXPIRE SOON</span></strong></span></span></p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEi629v5lGaDLWOY9bxB_DJhjErZ5KRdBSkTGWzviEXHiuDVAkZR_btgaWgQfVUdBl7gSUCxH9cL2yc2JdSphMmQ9_Wt17BUECzxJT7ev4dFIbBW8v_xzflRAyk3iULY0Ey7YdNsSwVlgx1fnSrdpnAI00FUifPbciatwz47e4FMR469iVLhkPVjnqBsDp8z/w400-h389/WellMe%20MenoRescue%2011.png" alt="" width="400" height="389" border="0" data-original-height="545" data-original-width="560" /></a></div> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEhSJvORHtAeEI3H2rypjo7v70Cm2j2tC1B-Ja0K1qVp1MEYhmISktm3oeSPvmtOjcgIp6VWYex2WQ2w6gsXFZPdis4AmxfwRGftHtwSK5PNs5-vJjhVZwsNY6SljpUWbanRSWMbVUibr78lOgAkjowIEGQGH8g4my7mrAF8bND5KSQ7K8qU9d1qadr8WA/w327-h97/btn.png" alt="" width="327" height="97" border="0" data-original-height="84" data-original-width="282" /></a></div> <p style="text-align: center;">By submitting, you affirm to have read and agreed to our <a href="https://www.healthsupplement24x7.com/get-menorescue"><span style="color: red;">Terms &amp; Conditions</span></a>.</p> <p style="text-align: center;"><a href="https://www.healthsupplement24x7.com/get-menorescue"><span style="font-size: medium;"><span style="background-color: #ffe599;"><span style="color: red;"><span style="font-family: georgia;"><strong>HUGE SAVINGS Get Your MenoRescue 
&ldquo;Get Something OFF&rdquo; Get 2+1 Offer Hurry Only For 1st User!!</strong></span></span></span></span></a></p> <h2 style="text-align: left;"><strong><a href="https://sway.office.com/O70WclG0X6BGX6cC?ref=Link&amp;loc=mysways">MenoRescue</a> Refund Policy</strong></h2> <p>We're so confident you'll be head-over-heels thrilled with the results you get that we're giving you a full 6 months to try MenoRescue&trade; for yourself, completely risk-free, regardless of the package you choose.</p> <p><a href="https://www.ivoox.com/wellme-menorescue-advanced-formula-to-fix-menopause-cycle-audios-mp3_rf_115321568_1.html">MenoRescue</a>, like all WellMe supplements, comes with a 180-day moneyback guarantee. You have 180 days to test the supplement. If you&rsquo;re unhappy for any reason, then you can request a complete refund with no questions asked.</p> <h3 style="text-align: center;"><strong style="background-color: #ffe599; color: red; font-family: georgia;"><a href="https://www.healthsupplement24x7.com/get-menorescue">Experience, Enjoy, Refund: Our Unwavering Guarantee</a></strong></h3> <h2 style="text-align: left;"><strong>Summary</strong></h2> <p>MenoRescue provides women with a way to cope with the hormonal changes that happen as they get older. The formula isn't meant to be used as a medication, even though it packs a lot of strength with potent dosages and beneficial ingredients. Black cohosh has a starring role in this formula for the improvement against night sweats, but the entire ingredient list works together to strengthen bones, reduce hot flashes, and support the symptoms that consumers deal with as they age. 
With a money-back guarantee and a doctor-formulated recipe, consumers have nothing to lose by giving MenoRescue a chance.</p> <p class="ql-align-center" style="text-align: center;"><a href="https://www.healthsupplement24x7.com/get-menorescue"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgHYxn3NMPQqsmKG54OjkQESM8dw8D7zUXtssdLHaaWSYArzmNucZfEfKCOBsnUqZdp6i-enO0zDWtMGF2pKG2MifoTldIDExJOBDWxicPkSeox29VCmqX6Cz2feNaSfYBnC_BHUdfPT1qUGVgSNyn0NtyKxY-V-M-BDbo5jCOW4qSuxwu3TOTA3dSjIQ/s1600/Screenshot%20(1445).png" alt="" width="320" height="114" /></a></p> <p class="ql-align-center" style="text-align: center;"><span style="font-family: georgia;"><strong><a href="https://www.healthsupplement24x7.com/get-menorescue">Terms and Conditions</a></strong><strong> | </strong><a href="https://www.healthsupplement24x7.com/get-menorescue"><strong>Privacy</strong></a><strong> | </strong><a href="https://www.healthsupplement24x7.com/get-menorescue"><strong>Contact Us</strong></a></span></p> <p class="ql-align-center" style="text-align: center;"><span style="font-family: georgia;"><strong>&copy; 2023 <a href="https://www.healthsupplement24x7.com/get-menorescue">WellMe</a></strong><strong>. All Rights Reserved.</strong></span></p>
cplusx/Guoxiao
2023-09-02T07:19:03.000Z
[ "region:us" ]
cplusx
null
null
null
0
0
Entry not found
jaygala223/95-cloud-train-only-v1
2023-09-02T10:38:35.000Z
[ "region:us" ]
jaygala223
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: image splits: - name: train num_bytes: 1907784030.75 num_examples: 26301 download_size: 1897837399 dataset_size: 1907784030.75 --- # Dataset Card for "95-cloud-train-only-v1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
quocanh34/result_with_w2v2_aug_on_fly_01
2023-09-02T07:29:09.000Z
[ "region:us" ]
quocanh34
null
null
null
0
0
--- dataset_info: features: - name: audio dtype: audio: sampling_rate: 16000 - name: id dtype: string - name: w2v2_baseline_transcription dtype: string - name: w2v2_baseline_norm dtype: string splits: - name: train num_bytes: 174371470.027 num_examples: 1299 download_size: 164200334 dataset_size: 174371470.027 --- # Dataset Card for "result_with_w2v2_aug_on_fly_01" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
rohanbalkondekar/HealthCareFacts_Small
2023-09-02T07:42:09.000Z
[ "region:us" ]
rohanbalkondekar
null
null
null
0
0
Entry not found
polymath707/thaillama2-without-emojis
2023-09-02T07:48:19.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
dariolopez/Llama-2-oasst1-es-test-format2
2023-09-02T07:57:58.000Z
[ "region:us" ]
dariolopez
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 4567059 num_examples: 3909 download_size: 2525724 dataset_size: 4567059 --- # Dataset Card for "Llama-2-oasst1-es-test-format2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
polymath707/aseanllama2-without-emojis
2023-09-02T08:23:20.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
eachadea/mnhtn-5M
2023-09-02T09:13:22.000Z
[ "region:us" ]
eachadea
null
null
null
0
0
Entry not found
Sagar0934/guanaco-llama2-1k
2023-09-03T10:12:58.000Z
[ "region:us" ]
Sagar0934
null
null
null
0
0
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 1654448 num_examples: 1000 download_size: 0 dataset_size: 1654448 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "guanaco-llama2-1k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
reciprocate/gsm8k_train_pairwise
2023-09-02T11:29:30.000Z
[ "region:us" ]
reciprocate
null
null
null
0
0
--- dataset_info: features: - name: prompt dtype: string - name: selected dtype: string - name: rejected dtype: string splits: - name: train num_bytes: 5031130 num_examples: 7043 download_size: 2891682 dataset_size: 5031130 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "gsm8k_train_pairwise" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
eryttrytr/sandyagga1
2023-09-02T09:20:10.000Z
[ "region:us" ]
eryttrytr
null
null
null
0
0
Entry not found
eryttrytr/sandyagga3
2023-09-02T09:20:16.000Z
[ "region:us" ]
eryttrytr
null
null
null
0
0
Entry not found
eryttrytr/sandyagga4
2023-09-02T09:20:18.000Z
[ "region:us" ]
eryttrytr
null
null
null
0
0
Entry not found
eryttrytr/sandyagga5
2023-09-02T09:20:18.000Z
[ "region:us" ]
eryttrytr
null
null
null
0
0
Entry not found
eryttrytr/sandyagga2
2023-09-02T09:20:15.000Z
[ "region:us" ]
eryttrytr
null
null
null
0
0
Entry not found
open-llm-leaderboard/details_PeanutJar__LLaMa-2-PeanutButter_v18_A-7B
2023-09-22T16:10:56.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of PeanutJar/LLaMa-2-PeanutButter_v18_A-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [PeanutJar/LLaMa-2-PeanutButter_v18_A-7B](https://huggingface.co/PeanutJar/LLaMa-2-PeanutButter_v18_A-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PeanutJar__LLaMa-2-PeanutButter_v18_A-7B\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-09-22T16:10:43.222323](https://huggingface.co/datasets/open-llm-leaderboard/details_PeanutJar__LLaMa-2-PeanutButter_v18_A-7B/blob/main/results_2023-09-22T16-10-43.222323.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0014681208053691276,\n\ \ \"em_stderr\": 0.0003921042190298372,\n \"f1\": 0.05780201342281884,\n\ \ \"f1_stderr\": 0.0013345722358967201,\n \"acc\": 0.41051883916267196,\n\ \ \"acc_stderr\": 0.009653462102026637\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0014681208053691276,\n \"em_stderr\": 0.0003921042190298372,\n\ \ \"f1\": 0.05780201342281884,\n \"f1_stderr\": 0.0013345722358967201\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.07202426080363912,\n \ \ \"acc_stderr\": 0.007121147983537124\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7490134175217048,\n \"acc_stderr\": 0.01218577622051615\n\ \ }\n}\n```" repo_url: https://huggingface.co/PeanutJar/LLaMa-2-PeanutButter_v18_A-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|arc:challenge|25_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T09:37:14.213070.parquet' - config_name: harness_drop_3 data_files: - split: 2023_09_22T16_10_43.222323 path: - '**/details_harness|drop|3_2023-09-22T16-10-43.222323.parquet' - split: latest path: - '**/details_harness|drop|3_2023-09-22T16-10-43.222323.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_09_22T16_10_43.222323 path: - '**/details_harness|gsm8k|5_2023-09-22T16-10-43.222323.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-09-22T16-10-43.222323.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hellaswag|10_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T09:37:14.213070.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T09:37:14.213070.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T09:37:14.213070.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T09:37:14.213070.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T09:37:14.213070.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T09:37:14.213070.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T09:37:14.213070.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T09:37:14.213070.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T09_37_14.213070 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T09:37:14.213070.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T09:37:14.213070.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_09_22T16_10_43.222323 path: - '**/details_harness|winogrande|5_2023-09-22T16-10-43.222323.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-09-22T16-10-43.222323.parquet' - config_name: results data_files: - split: 2023_09_02T09_37_14.213070 path: - results_2023-09-02T09:37:14.213070.parquet - split: 2023_09_22T16_10_43.222323 path: - results_2023-09-22T16-10-43.222323.parquet - split: latest path: - results_2023-09-22T16-10-43.222323.parquet --- # Dataset Card for Evaluation run of PeanutJar/LLaMa-2-PeanutButter_v18_A-7B ## Dataset 
Description - **Homepage:** - **Repository:** https://huggingface.co/PeanutJar/LLaMa-2-PeanutButter_v18_A-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [PeanutJar/LLaMa-2-PeanutButter_v18_A-7B](https://huggingface.co/PeanutJar/LLaMa-2-PeanutButter_v18_A-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PeanutJar__LLaMa-2-PeanutButter_v18_A-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-22T16:10:43.222323](https://huggingface.co/datasets/open-llm-leaderboard/details_PeanutJar__LLaMa-2-PeanutButter_v18_A-7B/blob/main/results_2023-09-22T16-10-43.222323.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0014681208053691276, "em_stderr": 0.0003921042190298372, "f1": 0.05780201342281884, "f1_stderr": 0.0013345722358967201, "acc": 0.41051883916267196, "acc_stderr": 0.009653462102026637 }, "harness|drop|3": { "em": 0.0014681208053691276, "em_stderr": 0.0003921042190298372, "f1": 0.05780201342281884, "f1_stderr": 0.0013345722358967201 }, "harness|gsm8k|5": { "acc": 0.07202426080363912, "acc_stderr": 0.007121147983537124 }, "harness|winogrande|5": { "acc": 0.7490134175217048, "acc_stderr": 0.01218577622051615 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
mindchain/instruct01
2023-09-02T09:48:15.000Z
[ "region:us" ]
mindchain
null
null
null
0
0
Entry not found
Tharun2003/tharun-123
2023-09-02T09:57:08.000Z
[ "license:openrail", "region:us" ]
Tharun2003
null
null
null
0
0
--- license: openrail ---
Tharun2003/tharun-321
2023-09-02T10:04:15.000Z
[ "license:openrail", "region:us" ]
Tharun2003
null
null
null
0
0
--- license: openrail ---
Pisethan/avirut
2023-09-02T10:07:20.000Z
[ "license:apache-2.0", "region:us" ]
Pisethan
null
null
null
0
0
--- license: apache-2.0 ---
artdwn/Ayang
2023-09-03T05:36:34.000Z
[ "region:us" ]
artdwn
null
null
null
0
0
Entry not found
Cyleux/promptToApiSpec
2023-09-02T10:29:32.000Z
[ "region:us" ]
Cyleux
null
null
null
0
0
Entry not found
menorescue/wellme-menorescue
2023-09-02T10:54:23.000Z
[ "region:us" ]
menorescue
null
null
null
0
0
➢**Product Name** — [WellMe MenoRescue](https://wellme-menorescue-reviews.jimdosite.com/) ➢**Main Benefits** — Overcoming Menopausal Symptoms ➢**Composition** — Natural Organic Compound ➢**Rating** — ★★★★✰ 4.9/5 ➢**Availability** — [Online](https://www.healthsupplement24x7.com/get-menorescue) ➢**Official Website** — [https://www.healthsupplement24x7.com/get-menorescue](https://www.healthsupplement24x7.com/get-menorescue) [WellMe MenoRescue](https://healthsupplements24x7.blogspot.com/2023/09/wellme-menorescue.html) are such pills that are manufactured by keeping those who are in menopause phases. In menopause body does not produce estrogen and progesterone, which results in various issues like sleep disorder, weight gain, hot flashes, etc. these help older people in controlling menopause symptoms and losing weight. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEg1X1PrneOjxRLzCwTG4lbapof3U7ydl7yhzEZqqBWlPUEng3x8r7z_WAZWTdbDONXbch-kc2Kb9g00Y8mq0Xv-sgw1ZVqdUdvnJh7pSaI4zb9Z61J-xXzfgiAn8HjsbHoN4ll_-tMe2h_v4HAX68cBf8d3usccDbubLSsJb58-x3XkUdL3IEHOirnIl4E4/w640-h350/Screenshot%20(3143).png)](https://www.healthsupplement24x7.com/get-menorescue) ### _**[Click Here To Order WellMe MenoRescue From The Official Website](https://www.healthsupplement24x7.com/get-menorescue)**_ **What Is MenoRescue?** ----------------------- [MenoRescue](https://wellme-menorescue.clubeo.com/page/menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html) is a women’s health dietary supplement formulated to assist them in overcoming menopausal symptoms. This all-natural supplement is reckoned to achieve hormonal balance so that women can finally experience healthy sleep again, have better control over their emotions, and make rational decisions. It’s even claimed that the unique combination of ingredients inside MenoRescue eliminates the culprit for hormonal fluctuations. 
That’s not all, from our editorial team’s viewpoint, the beauty of this formula rests in its ability to extend beyond easing menopausal symptoms, making [MenoRescue](https://www.ivoox.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-audios-mp3_rf_115321252_1.html) a wholesome supplement. Before we discuss the possible benefits, let’s take a brief moment to reflect on the exact cause that the creators, WellMe have factored into their solution. **Does WellMe MenoRescue Work?** -------------------------------- [WellMe MenoRescue](https://pdfhost.io/v/zADM.lQYZ_WellMe_MenoRescue_Overcoming_Menopausal_Symptoms_And_Imaprove_Harmonal_Balance) improves the functionality of the endocrine system in females using healthy strains. This will equip them to deal with menopause symptoms that are rapid-occurring such as weight gain. [WellMe MenoRescue](https://www.sympla.com.br/evento/wellme-menorescue---overcoming-menopausal-symptoms-and-imaprove-harmonal-balance/2145946) worked for so many females who had been complaining about low energy levels and seeing their lives running downward. The collection of colonizing gut bacteria in the supplement changed their eating habits which led them to spend a quality life without having menopause symptoms. **Advantages Of The MenoRescue Supplement** ------------------------------------------- According to [WellMe MenoRescue](https://soundcloud.com/menorescue/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) works by balancing cortisol and hormone levels. By balancing levels of cortisol, the stress hormone, [MenoRescue](https://groups.google.com/g/wellme-menorescue-pills/c/GHY1Stgn23k) can support a range of secondary benefits – from weight loss to estrogen balance to normal body temperature. Here are some of the benefits of MenoRescue, according to the manufacturer: **Cortisol Levels**\-Sensoril® is a multi-purpose ingredient in this supplement. 
It is a patented form of the ancient herb Ashwagandha which is popular in the medical field for its unique ability to promote healthy cortisol levels. Several peer-reviewed clinical trials support its efficacy in managing cortisol levels. Green Tea is another ingredient that helps in this. **Excess Body Fat**\-No pill or juice is a magical remedy for fat burn. Various studies and clinical trials have found Greenselect Phytosome to be a highly safe and effective tool for reducing fat and shrinking that extra belly. Moreover, it’s a caffeine-devoid green tea extract. Hence MenoRescue may help in not only weight loss but also firing up the metabolism. **Mood Swings**\-The entire combination of ingredients used in its making is such that reduced stress and depression are highly likely outcomes. For instance, Rhodiola powder is thought to help the body deal with stress better. **Energy Levels**\-Hormones playing a roller coaster could mean havoc on energy levels. One feels tired, and all-day fatigue is common. MenoRescue may help regulate energy levels by giving your body an energy boost. **Hot Flashes**\-Red clover is a traditional medicine used to cure menopausal symptoms. Hot flashes and night sweats are one of them. There is a shortage of scientific research on the potential benefits of red clover. However, ancient medical sciences used it for treating hot flashes. **Cortisol Levels**\-Sensoril is a multi-purpose ingredient in this supplement. It is a patented form of the ancient herb Ashwagandha which is popular in the medical field for its unique ability to promote healthy cortisol levels. Several peer-reviewed clinical trials support its efficacy in managing cortisol levels. Green Tea is another ingredient that helps in this. **Restful Sleep**\-As claimed, it treats hormonal imbalance, thus making cortisol levels healthy. A peaceful good night’s sleep is a natural consequence. 
[![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEjByYWMygfzsbenlSAVa7eN1IMqZ3FHRSWc4QZH_jIJqVNBgzIjTixSk0RkC_fizv5VzkGlocEGHH7ViNgVhkQHitaFepYCLnITiboLxdBDfBRc_qvFDHqoC6ZPEHQtfkRnt4L6ZW0i3zzdIlsUPuaHDBnf74x2As4tMjqEOAVsAZ5FQmhA3SYpBFdzYdAb/w640-h525/Screenshot%20(3133).png)](https://www.healthsupplement24x7.com/get-menorescue) ### _**[Order Your Supply Of WellMe MenoRescue Now And Start Enjoying The Benefits!](https://www.healthsupplement24x7.com/get-menorescue)**_ **Which Ingredients Used In WellMe MenoRescue?** ------------------------------------------------ [WellMe MenoRescue](https://events.humanitix.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) employs a natural strong component blend to improve female libido. These chemicals and aphrodisiacs act synergistically to restore the woman’s ability to experience sexual pleasure.These include: **Sensoril**: Being a patented form of Ashwagandha extract, it's vital in the [MenoRescue](https://form.jotform.com/menorescue/wellme-menorescue) TM formulation. The ingredient has the unique ability to regulate cortisol levels. Having been obtained from organically grown plants and undergone clinical trials, the compound promotes healthy cortisol levels more optimally than a placebo. **Chasteberry:** Chasteberry can support progesterone levels, according to WellMe. One 8-week study from 2019 found chasteberry led to “significantly lower” incidents and severity of menopause symptoms compared to a placebo. Other studies have found chasteberry helps with mood, body temperature, sleep, mood, and even breast tenderness, among other benefits. **Black Cohosh:** This is another ingredient with phytoestrogen properties. It minimizes the severity of menopausal symptoms by lessening the frequency of hot flashes, regulating body temperature, promoting sound sleep, regulating mood, and boosting libido. 
**Red Clover :** [MenoRescue](https://devfolio.co/projects/wellme-menorescue-5ec6) contains red clover, which has particularly high levels of isoflavones. Those isoflavones are important because they’re structurally similar to estrogen. Even a small amount of red clover – 80mg – has been linked to menopause relief. One 12-week study found red clover led to a “remarkable difference” in the health and happiness of women going through menopause. Participants reported a 50% reduction in menopause symptoms based on the Menopause Rating Scale (MRS), which covers 11 of the most common menopause symptoms. **Greenselect Phytosome:** It's obtained from tea and formulated with a patented absorption-boosting mechanism. The tea extract is clinically proven to promote healthy cortisol levels. In addition, the element boosts metabolism, alleviates excess fat, and suppresses cravings. As a result, it's ideal for eliminating stubborn fat, combating oxidative stress, and reducing belly fat. **Rhodiola Rosea:** Another herbal extract obtained from an Asian-European plant is the Rhodiola Rosea. Besides enhancing healthy cortisol levels during menopause, the ingredient has multifold benefits. It has adaptogens that aid in alleviating stress, fatigue, and depression. In addition, the element supports cognitive abilities, regulates body temperature, and enhances muscle performance and circulatory health. **Bioperine:** BioPerine is a patented form of black pepper extract known to boost absorption of other ingredients. Studies show your body absorbs certain supplement ingredients more easily when you combine it with BioPerine. The BioPerine in MenoRescue contains 95% piperine (the active ingredient in pepper) by concentration. **Schizandra Berry Extract:** This essential extract is obtained from a vine-like plant that produces purple-red berries. It's used in this supplement based on its ability to regulate cortisol levels and rebalance estrogen and progesterone hormones. 
In addition, it positively impacts overall health since it supports better sleep, enhances cognitive skills, and boosts muscle strength. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgDXVp82o8N4CTtPIORQI_5ldUxGuG7wNPnUucxvLrA8DBr6vJRcYFFBasriXYgxZ814P1VhJPY2dRta2ae9-qU6G3qB1TWifa9loVb9qDkpvU1DpCR3uAer91ZSPi3pbcgk8H_bRg0p_ZdeHYuO3XQ2KGWKvoYHU_zK6LgCcplUYys-fLK7UmZ7x1KioYw/w640-h544/Screenshot%20(3146).png)](https://www.healthsupplement24x7.com/get-menorescue) ### _**[Click to See the Full List of Ingredients in WellMe MenoRescue](https://www.healthsupplement24x7.com/get-menorescue)**_ **Recommended Dosage** ---------------------- The manufacturer recommends taking two capsules of [WellMe MenoRescue](https://www.townscript.com/e/wellme-menorescue-013422) daily first thing in the morning. You may take it with or without food. There are 60 capsules in the container, providing you with a 30-day supply.You will get better results if you take [WellMe MenoRescue](https://electronoobs.io/project/11429#) pills in the morning g on an empty stomach. However, if you have difficulty digesting the [WellMe MenoRescue](https://menorescue.wixsite.com/wellme-menorescue) pills at the start, then take them after breakfast or lunch. **WellMe MenoRescue Side Effects** ---------------------------------- People become conscious before buying any new product, especially in the case of supplements s; this can result in severe side effects. So, it is crucial to check the ingredients of any supplement so that you can avoid allergies (if you have formed any ingredient). Other than the ingredients, there are mild side effects of [WellMe MenoRescue](https://colab.research.google.com/drive/1gR2oY-nxSkQLOVm9uky3M7CyTnSlFIfl) pills that vary among people. However, some people face difficulties in digesting the WellMe MenoRescue supplement at the start. When the body gets used to it, the digestion problem will not occur. 
**WellMe MenoRescue Pro At the Most Affordable Price** ------------------------------------------------------ Only the official [WellMe MenoRescue](https://www.scoop.it/topic/wellme-menorescue) website is an authentic source for the serum. Manufacturers have not authorized any other sellers to offer this product, so any product you see elsewhere is likely a fake and not a genuine product. Purchase the serum only from the official site to avoid scams. The manufacturer offers many bundles and discounts to make this serum more affordable. Below are the pricing and details: _**1 Bottle 1-Month Supply - $59 per bottle**_ _**3 Bottles 3-Month Supply + Bonuses - $49 per bottle**_ _**6 Bottles 6-Month Supply + Bonuses $39 per bottle**_ [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgZ6fezxuvl33Kl_s_WxBxukEv1bZvz0xSuW_zWyvhaeYrR_EbEGDKdEWY7t3tA_-h2p6oTkWcB4hsIRAR03rUge4AWaOEksBOuskbbpFRZnc1WhIhoOxG4KDMhQjz8vmjM-jRwp0Hu1byKvu-DF5MdEBWTzVsMwF_jjOQX9p1Li7yVS5b3XRPuC6uSqL8u/w640-h420/Screenshot%20(3144).png)](https://www.healthsupplement24x7.com/get-menorescue) ### [_**(Limited Time Discount Offer) Purchase WellMe MenoRescue at Special Price + Free Bonuses**_](https://www.healthsupplement24x7.com/get-menorescue) **Refund Policy for WellMe MenoRescue Customers** ------------------------------------------------- Customers who buy the supplement via the manufacturer's site have access to a return policy. There's a 180-days grace period for returns and refunds beginning on the date of purchase. If you want to have a refund, then follow simple steps. Customers who buy the supplement from Amazon may be subject to a different return policy than those who buy it from other retailers. You can check the method of amazon refund policy, or you can contact their customer support contact to get the details. 
**Where to Buy WellMe MenoRescue?** ----------------------------------- [WellMe MenoRescue](https://wellme-menorescue.clubeo.com/page/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html) are available on the official website of the company. You can also get it from any reliable online source like Amazon, where you can also check the feedback of customers worldwide with all the pros and cons of consuming them. **WellMe MenoRescue Final Thoughts** ------------------------------------ [WellMe MenoRescue](https://wellme-menorescue.clubeo.com/calendar/2023/09/02/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) are new to the market with a limited number of customers. The market for this product is growing fast as people are aware of the positive impacts, they can have with [WellMe MenoRescue](https://devfolio.co/@wellmenorescue) pill. Women who are in the menopause phase need to consume these soils as these pills are designed in such a way theta to minimize the symptoms that they have to bear on missing their monthly periods. They act effectively in increasing the metabolic rate and the chances of reducing extra pounds within a short period. The success rate of these in terms of gut health and weight loss is seen among customers, and you can also check the reviews at the online stores. 
[![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiv193RPyXjY04nGhMT3XQEGsWaQUsChOx7NJQqLxby-_DyJ70H4k40TrslmdfrZhQQCXmhE--_Yxi3cSwE_vdldkRRaGA47zcLZJu10W-1ecu749wOtVMJCe1g-HtPbGkS4pG71rua1pe7dTpesogQ3uB2m5Qiq1qydDJDJddmro6nx_L2eF0UrdDDwKet/w640-h398/Screenshot%20(3131).png)](https://www.healthsupplement24x7.com/get-menorescue) ### _**[Click Here to Order WellMe MenoRescue at an Exclusive Low Price Today!](https://www.healthsupplement24x7.com/get-menorescue)**_ [https://healthsupplements24x7.blogspot.com/2023/09/wellme-menorescue.html](https://healthsupplements24x7.blogspot.com/2023/09/wellme-menorescue.html) [https://www.ivoox.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-audios-mp3\_rf\_115321252\_1.html](https://www.ivoox.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-audios-mp3_rf_115321252_1.html) [https://pdfhost.io/v/zADM.lQYZ\_WellMe\_MenoRescue\_Overcoming\_Menopausal\_Symptoms\_And\_Imaprove\_Harmonal\_Balance](https://pdfhost.io/v/zADM.lQYZ_WellMe_MenoRescue_Overcoming_Menopausal_Symptoms_And_Imaprove_Harmonal_Balance) [https://wellme-menorescue.clubeo.com/](https://wellme-menorescue.clubeo.com/) [https://wellme-menorescue.clubeo.com/calendar/2023/09/02/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance](https://wellme-menorescue.clubeo.com/calendar/2023/09/02/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) [https://wellme-menorescue.clubeo.com/page/menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html](https://wellme-menorescue.clubeo.com/page/menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html) [https://wellme-menorescue.clubeo.com/page/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html](https://wellme-menorescue.clubeo.com/page/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance.html) 
[https://soundcloud.com/menorescue/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance](https://soundcloud.com/menorescue/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) [https://wellme-menorescue-reviews.jimdosite.com/](https://wellme-menorescue-reviews.jimdosite.com/) [https://www.sympla.com.br/evento/wellme-menorescue---overcoming-menopausal-symptoms-and-imaprove-harmonal-balance/2145946](https://www.sympla.com.br/evento/wellme-menorescue---overcoming-menopausal-symptoms-and-imaprove-harmonal-balance/2145946) [https://www.scoop.it/topic/wellme-menorescue](https://www.scoop.it/topic/wellme-menorescue) [https://www.townscript.com/e/wellme-menorescue-013422](https://www.townscript.com/e/wellme-menorescue-013422) [https://menorescue.wixsite.com/wellme-menorescue](https://menorescue.wixsite.com/wellme-menorescue) [https://blogfreely.net/menorescue/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal](https://blogfreely.net/menorescue/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal) [https://colab.research.google.com/drive/16SktN\_vwxrp54SpXL5t7zbrcS4miD\_bI](https://colab.research.google.com/drive/16SktN_vwxrp54SpXL5t7zbrcS4miD_bI) [https://colab.research.google.com/drive/1DWqoslGLn0tAnmY185lyx-aizkYsd2LE](https://colab.research.google.com/drive/1DWqoslGLn0tAnmY185lyx-aizkYsd2LE) [https://colab.research.google.com/drive/1jgtNiqpw9X2DcVZ4yGSBY9BBRSap16gm](https://colab.research.google.com/drive/1jgtNiqpw9X2DcVZ4yGSBY9BBRSap16gm) [https://colab.research.google.com/drive/1B8K6CsrRbxPsE6IZSKpf9EiNafoeNdE4](https://colab.research.google.com/drive/1B8K6CsrRbxPsE6IZSKpf9EiNafoeNdE4) [https://colab.research.google.com/drive/1gR2oY-nxSkQLOVm9uky3M7CyTnSlFIfl](https://colab.research.google.com/drive/1gR2oY-nxSkQLOVm9uky3M7CyTnSlFIfl) [https://devfolio.co/@wellmenorescue](https://devfolio.co/@wellmenorescue) 
[https://electronoobs.io/project/11429#](https://electronoobs.io/project/11429#) [https://events.humanitix.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance](https://events.humanitix.com/wellme-menorescue-overcoming-menopausal-symptoms-and-imaprove-harmonal-balance) [https://kiucks-ghaiocts-pleapp.yolasite.com/](https://kiucks-ghaiocts-pleapp.yolasite.com/) [https://devfolio.co/projects/wellme-menorescue-5ec6](https://devfolio.co/projects/wellme-menorescue-5ec6) [https://form.jotform.com/menorescue/wellme-menorescue](https://form.jotform.com/menorescue/wellme-menorescue) [https://groups.google.com/g/get-menorescue/c/Uzo0VgWXZEY](https://groups.google.com/g/get-menorescue/c/Uzo0VgWXZEY) [https://groups.google.com/g/read-menorescue-reviews/c/SqrU4LvX53U](https://groups.google.com/g/read-menorescue-reviews/c/SqrU4LvX53U) [https://groups.google.com/g/wellme-menorescue/c/brEQVbXwO44](https://groups.google.com/g/wellme-menorescue/c/brEQVbXwO44) [https://groups.google.com/g/read-wellme-menorescue-reviews/c/tn1TSvG7viQ](https://groups.google.com/g/read-wellme-menorescue-reviews/c/tn1TSvG7viQ) [https://groups.google.com/g/wellme-menorescue-pills/c/GHY1Stgn23k](https://groups.google.com/g/wellme-menorescue-pills/c/GHY1Stgn23k)
Jakir057/custom_cleaned18k
2023-09-02T11:13:53.000Z
[ "region:us" ]
Jakir057
null
null
null
0
0
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '10' '1': '100' '2': '1000' '3': '2' '4': '20' '5': '200' '6': '5' '7': '50' '8': '500' splits: - name: train num_bytes: 145908891.0 num_examples: 16200 - name: test num_bytes: 15833757.0 num_examples: 1800 download_size: 157098269 dataset_size: 161742648.0 --- # Dataset Card for "custom_cleaned18k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj
2023-09-02T11:15:29.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj](https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T11:14:02.105897](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj/blob/main/results_2023-09-02T11%3A14%3A02.105897.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5404583425181168,\n\ \ \"acc_stderr\": 0.03467391386458985,\n \"acc_norm\": 0.5446730811780173,\n\ \ \"acc_norm_stderr\": 0.03465335018844135,\n \"mc1\": 0.25458996328029376,\n\ \ \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.3791772345992325,\n\ \ \"mc2_stderr\": 0.013816425829144595\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5460750853242321,\n \"acc_stderr\": 0.014549221105171864,\n\ \ \"acc_norm\": 0.5853242320819113,\n \"acc_norm_stderr\": 0.014397070564409172\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6153156741684923,\n\ \ \"acc_stderr\": 0.004855262903270802,\n \"acc_norm\": 0.8247361083449513,\n\ \ \"acc_norm_stderr\": 0.0037941565512722686\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5394736842105263,\n \"acc_stderr\": 0.04056242252249034,\n\ \ \"acc_norm\": 0.5394736842105263,\n \"acc_norm_stderr\": 0.04056242252249034\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n\ \ \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \ \ \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 0.030151134457776285,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.030151134457776285\n \ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.5972222222222222,\n\ \ \"acc_stderr\": 0.04101405519842425,\n \"acc_norm\": 0.5972222222222222,\n\ \ \"acc_norm_stderr\": 0.04101405519842425\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\"\ : 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4797687861271676,\n\ \ \"acc_stderr\": 0.03809342081273957,\n \"acc_norm\": 0.4797687861271676,\n\ \ \"acc_norm_stderr\": 0.03809342081273957\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n\ \ \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n\ \ \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.41702127659574467,\n \"acc_stderr\": 0.03223276266711711,\n\ \ \"acc_norm\": 0.41702127659574467,\n \"acc_norm_stderr\": 0.03223276266711711\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n\ \ \"acc_stderr\": 0.04404556157374768,\n \"acc_norm\": 0.32456140350877194,\n\ \ \"acc_norm_stderr\": 0.04404556157374768\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.041665675771015785,\n\ \ \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.041665675771015785\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3333333333333333,\n \"acc_stderr\": 0.0242785680243077,\n \"acc_norm\"\ : 0.3333333333333333,\n \"acc_norm_stderr\": 
0.0242785680243077\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3253968253968254,\n\ \ \"acc_stderr\": 0.041905964388711366,\n \"acc_norm\": 0.3253968253968254,\n\ \ \"acc_norm_stderr\": 0.041905964388711366\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6,\n\ \ \"acc_stderr\": 0.02786932057166463,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.02786932057166463\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4187192118226601,\n \"acc_stderr\": 0.034711928605184676,\n\ \ \"acc_norm\": 0.4187192118226601,\n \"acc_norm_stderr\": 0.034711928605184676\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\"\ : 0.59,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.036639749943912434,\n\ \ \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.036639749943912434\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6868686868686869,\n \"acc_stderr\": 0.033042050878136525,\n \"\ acc_norm\": 0.6868686868686869,\n \"acc_norm_stderr\": 0.033042050878136525\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7772020725388601,\n \"acc_stderr\": 0.030031147977641538,\n\ \ \"acc_norm\": 0.7772020725388601,\n \"acc_norm_stderr\": 0.030031147977641538\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.4794871794871795,\n \"acc_stderr\": 0.02532966316348994,\n \ \ \"acc_norm\": 0.4794871794871795,\n \"acc_norm_stderr\": 0.02532966316348994\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 
0.31851851851851853,\n \"acc_stderr\": 0.028406533090608466,\n \ \ \"acc_norm\": 0.31851851851851853,\n \"acc_norm_stderr\": 0.028406533090608466\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5252100840336135,\n \"acc_stderr\": 0.03243718055137411,\n \ \ \"acc_norm\": 0.5252100840336135,\n \"acc_norm_stderr\": 0.03243718055137411\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"\ acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7302752293577982,\n \"acc_stderr\": 0.01902848671111544,\n \"\ acc_norm\": 0.7302752293577982,\n \"acc_norm_stderr\": 0.01902848671111544\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"\ acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7156862745098039,\n \"acc_stderr\": 0.03166009679399812,\n \"\ acc_norm\": 0.7156862745098039,\n \"acc_norm_stderr\": 0.03166009679399812\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \ \ \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6188340807174888,\n\ \ \"acc_stderr\": 0.03259625118416828,\n \"acc_norm\": 0.6188340807174888,\n\ \ \"acc_norm_stderr\": 0.03259625118416828\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5725190839694656,\n \"acc_stderr\": 0.04338920305792399,\n\ \ \"acc_norm\": 0.5725190839694656,\n \"acc_norm_stderr\": 0.04338920305792399\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7107438016528925,\n \"acc_stderr\": 
0.04139112727635463,\n \"\ acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.04139112727635463\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6388888888888888,\n\ \ \"acc_stderr\": 0.04643454608906275,\n \"acc_norm\": 0.6388888888888888,\n\ \ \"acc_norm_stderr\": 0.04643454608906275\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6257668711656442,\n \"acc_stderr\": 0.03802068102899616,\n\ \ \"acc_norm\": 0.6257668711656442,\n \"acc_norm_stderr\": 0.03802068102899616\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2857142857142857,\n\ \ \"acc_stderr\": 0.042878587513404544,\n \"acc_norm\": 0.2857142857142857,\n\ \ \"acc_norm_stderr\": 0.042878587513404544\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n\ \ \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8247863247863247,\n\ \ \"acc_stderr\": 0.02490443909891822,\n \"acc_norm\": 0.8247863247863247,\n\ \ \"acc_norm_stderr\": 0.02490443909891822\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \ \ \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7637292464878672,\n\ \ \"acc_stderr\": 0.015190473717037514,\n \"acc_norm\": 0.7637292464878672,\n\ \ \"acc_norm_stderr\": 0.015190473717037514\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.026226158605124658,\n\ \ \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.026226158605124658\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24022346368715083,\n\ \ \"acc_stderr\": 0.014288343803925293,\n \"acc_norm\": 0.24022346368715083,\n\ \ \"acc_norm_stderr\": 0.014288343803925293\n },\n 
\"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5620915032679739,\n \"acc_stderr\": 0.028408302020332687,\n\ \ \"acc_norm\": 0.5620915032679739,\n \"acc_norm_stderr\": 0.028408302020332687\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.617363344051447,\n\ \ \"acc_stderr\": 0.027604689028581975,\n \"acc_norm\": 0.617363344051447,\n\ \ \"acc_norm_stderr\": 0.027604689028581975\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.654320987654321,\n \"acc_stderr\": 0.02646248777700187,\n\ \ \"acc_norm\": 0.654320987654321,\n \"acc_norm_stderr\": 0.02646248777700187\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.41843971631205673,\n \"acc_stderr\": 0.02942799403941999,\n \ \ \"acc_norm\": 0.41843971631205673,\n \"acc_norm_stderr\": 0.02942799403941999\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.39374185136897,\n\ \ \"acc_stderr\": 0.012478532272564439,\n \"acc_norm\": 0.39374185136897,\n\ \ \"acc_norm_stderr\": 0.012478532272564439\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.47794117647058826,\n \"acc_stderr\": 0.030343264224213528,\n\ \ \"acc_norm\": 0.47794117647058826,\n \"acc_norm_stderr\": 0.030343264224213528\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5424836601307189,\n \"acc_stderr\": 0.020154685712590895,\n \ \ \"acc_norm\": 0.5424836601307189,\n \"acc_norm_stderr\": 0.020154685712590895\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5673469387755102,\n \"acc_stderr\": 0.03171752824062664,\n\ \ \"acc_norm\": 0.5673469387755102,\n \"acc_norm_stderr\": 0.03171752824062664\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6965174129353234,\n\ \ 
\"acc_stderr\": 0.03251006816458618,\n \"acc_norm\": 0.6965174129353234,\n\ \ \"acc_norm_stderr\": 0.03251006816458618\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \ \ \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n\ \ \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n\ \ \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7543859649122807,\n \"acc_stderr\": 0.03301405946987249,\n\ \ \"acc_norm\": 0.7543859649122807,\n \"acc_norm_stderr\": 0.03301405946987249\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25458996328029376,\n\ \ \"mc1_stderr\": 0.015250117079156494,\n \"mc2\": 0.3791772345992325,\n\ \ \"mc2_stderr\": 0.013816425829144595\n }\n}\n```" repo_url: https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|arc:challenge|25_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hellaswag|10_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:14:02.105897.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:14:02.105897.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:14:02.105897.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:14:02.105897.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T11_14_02.105897 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T11:14:02.105897.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T11:14:02.105897.parquet' - config_name: results data_files: - split: 2023_09_02T11_14_02.105897 path: - results_2023-09-02T11:14:02.105897.parquet - split: latest path: - results_2023-09-02T11:14:02.105897.parquet --- # Dataset Card for Evaluation run of CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the 
evaluation run of model [CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj](https://huggingface.co/CHIH-HUNG/llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T11:14:02.105897](https://huggingface.co/datasets/open-llm-leaderboard/details_CHIH-HUNG__llama-2-13b-huangyt_FINETUNE2_3w-q_k_v_o_proj/blob/main/results_2023-09-02T11%3A14%3A02.105897.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5404583425181168, "acc_stderr": 0.03467391386458985, "acc_norm": 0.5446730811780173, "acc_norm_stderr": 0.03465335018844135, "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156494, "mc2": 0.3791772345992325, "mc2_stderr": 0.013816425829144595 }, "harness|arc:challenge|25": { "acc": 0.5460750853242321, "acc_stderr": 0.014549221105171864, "acc_norm": 0.5853242320819113, "acc_norm_stderr": 0.014397070564409172 }, "harness|hellaswag|10": { "acc": 0.6153156741684923, "acc_stderr": 0.004855262903270802, "acc_norm": 0.8247361083449513, "acc_norm_stderr": 0.0037941565512722686 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750575, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5394736842105263, "acc_stderr": 0.04056242252249034, "acc_norm": 0.5394736842105263, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6, "acc_stderr": 0.030151134457776285, "acc_norm": 0.6, "acc_norm_stderr": 0.030151134457776285 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5972222222222222, "acc_stderr": 0.04101405519842425, "acc_norm": 0.5972222222222222, "acc_norm_stderr": 0.04101405519842425 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4797687861271676, "acc_stderr": 0.03809342081273957, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.03809342081273957 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.41702127659574467, "acc_stderr": 0.03223276266711711, "acc_norm": 0.41702127659574467, "acc_norm_stderr": 0.03223276266711711 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.0242785680243077, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.0242785680243077 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3253968253968254, "acc_stderr": 0.041905964388711366, "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6, "acc_stderr": 0.02786932057166463, "acc_norm": 0.6, "acc_norm_stderr": 0.02786932057166463 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4187192118226601, "acc_stderr": 
0.034711928605184676, "acc_norm": 0.4187192118226601, "acc_norm_stderr": 0.034711928605184676 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6727272727272727, "acc_stderr": 0.036639749943912434, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.036639749943912434 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6868686868686869, "acc_stderr": 0.033042050878136525, "acc_norm": 0.6868686868686869, "acc_norm_stderr": 0.033042050878136525 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7772020725388601, "acc_stderr": 0.030031147977641538, "acc_norm": 0.7772020725388601, "acc_norm_stderr": 0.030031147977641538 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4794871794871795, "acc_stderr": 0.02532966316348994, "acc_norm": 0.4794871794871795, "acc_norm_stderr": 0.02532966316348994 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.31851851851851853, "acc_stderr": 0.028406533090608466, "acc_norm": 0.31851851851851853, "acc_norm_stderr": 0.028406533090608466 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5252100840336135, "acc_stderr": 0.03243718055137411, "acc_norm": 0.5252100840336135, "acc_norm_stderr": 0.03243718055137411 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7302752293577982, "acc_stderr": 0.01902848671111544, "acc_norm": 0.7302752293577982, "acc_norm_stderr": 0.01902848671111544 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39351851851851855, "acc_stderr": 0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312 }, 
"harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7156862745098039, "acc_stderr": 0.03166009679399812, "acc_norm": 0.7156862745098039, "acc_norm_stderr": 0.03166009679399812 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6188340807174888, "acc_stderr": 0.03259625118416828, "acc_norm": 0.6188340807174888, "acc_norm_stderr": 0.03259625118416828 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5725190839694656, "acc_stderr": 0.04338920305792399, "acc_norm": 0.5725190839694656, "acc_norm_stderr": 0.04338920305792399 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.04139112727635463, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.04139112727635463 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6388888888888888, "acc_stderr": 0.04643454608906275, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.04643454608906275 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6257668711656442, "acc_stderr": 0.03802068102899616, "acc_norm": 0.6257668711656442, "acc_norm_stderr": 0.03802068102899616 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2857142857142857, "acc_stderr": 0.042878587513404544, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.042878587513404544 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8247863247863247, "acc_stderr": 0.02490443909891822, "acc_norm": 0.8247863247863247, "acc_norm_stderr": 0.02490443909891822 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, 
"harness|hendrycksTest-miscellaneous|5": { "acc": 0.7637292464878672, "acc_stderr": 0.015190473717037514, "acc_norm": 0.7637292464878672, "acc_norm_stderr": 0.015190473717037514 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6127167630057804, "acc_stderr": 0.026226158605124658, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.026226158605124658 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.014288343803925293, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.014288343803925293 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5620915032679739, "acc_stderr": 0.028408302020332687, "acc_norm": 0.5620915032679739, "acc_norm_stderr": 0.028408302020332687 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.617363344051447, "acc_stderr": 0.027604689028581975, "acc_norm": 0.617363344051447, "acc_norm_stderr": 0.027604689028581975 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.654320987654321, "acc_stderr": 0.02646248777700187, "acc_norm": 0.654320987654321, "acc_norm_stderr": 0.02646248777700187 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.41843971631205673, "acc_stderr": 0.02942799403941999, "acc_norm": 0.41843971631205673, "acc_norm_stderr": 0.02942799403941999 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.39374185136897, "acc_stderr": 0.012478532272564439, "acc_norm": 0.39374185136897, "acc_norm_stderr": 0.012478532272564439 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.47794117647058826, "acc_stderr": 0.030343264224213528, "acc_norm": 0.47794117647058826, "acc_norm_stderr": 0.030343264224213528 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5424836601307189, "acc_stderr": 0.020154685712590895, "acc_norm": 0.5424836601307189, "acc_norm_stderr": 0.020154685712590895 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 
0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5673469387755102, "acc_stderr": 0.03171752824062664, "acc_norm": 0.5673469387755102, "acc_norm_stderr": 0.03171752824062664 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6965174129353234, "acc_stderr": 0.03251006816458618, "acc_norm": 0.6965174129353234, "acc_norm_stderr": 0.03251006816458618 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7543859649122807, "acc_stderr": 0.03301405946987249, "acc_norm": 0.7543859649122807, "acc_norm_stderr": 0.03301405946987249 }, "harness|truthfulqa:mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156494, "mc2": 0.3791772345992325, "mc2_stderr": 0.013816425829144595 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
marianna13/physics-stackexchange
2023-09-19T10:34:24.000Z
[ "region:us" ]
marianna13
null
null
null
0
0
Entry not found
open-llm-leaderboard/details_ValiantLabs__ShiningValiant
2023-09-22T22:27:25.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of ValiantLabs/ShiningValiant dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [ValiantLabs/ShiningValiant](https://huggingface.co/ValiantLabs/ShiningValiant)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_ValiantLabs__ShiningValiant\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-22T22:26:27.274686](https://huggingface.co/datasets/open-llm-leaderboard/details_ValiantLabs__ShiningValiant/blob/main/results_2023-09-22T22-26-27.274686.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6386983068475005,\n\ \ \"acc_stderr\": 0.032863621226889406,\n \"acc_norm\": 0.6425916297913504,\n\ \ \"acc_norm_stderr\": 0.03283781788418258,\n \"mc1\": 0.3561811505507956,\n\ \ \"mc1_stderr\": 0.016763790728446335,\n \"mc2\": 0.527991738544026,\n\ \ \"mc2_stderr\": 0.015530613367021443\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.6049488054607508,\n \"acc_stderr\": 0.01428589829293817,\n\ \ \"acc_norm\": 0.6450511945392492,\n \"acc_norm_stderr\": 0.013983036904094087\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6690898227444733,\n\ \ \"acc_stderr\": 0.004695791340502876,\n \"acc_norm\": 0.8586934873531169,\n\ \ \"acc_norm_stderr\": 0.003476255509644533\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5185185185185185,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.5185185185185185,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.7302631578947368,\n \"acc_stderr\": 0.03611780560284898,\n\ \ \"acc_norm\": 0.7302631578947368,\n \"acc_norm_stderr\": 0.03611780560284898\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n\ \ \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \ \ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6377358490566037,\n \"acc_stderr\": 0.029582245128384303,\n\ \ \"acc_norm\": 0.6377358490566037,\n \"acc_norm_stderr\": 0.029582245128384303\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n\ \ \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \ \ \"acc_norm_stderr\": 0.03621034121889507\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.59,\n \"acc_stderr\": 0.04943110704237101,\n \"acc_norm\"\ : 0.59,\n \"acc_norm_stderr\": 0.04943110704237101\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6069364161849711,\n\ \ \"acc_stderr\": 0.0372424959581773,\n \"acc_norm\": 0.6069364161849711,\n\ \ \"acc_norm_stderr\": 0.0372424959581773\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.04690650298201943,\n\ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.04690650298201943\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.032232762667117124,\n\ \ \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.032232762667117124\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.41228070175438597,\n\ \ \"acc_stderr\": 0.04630653203366595,\n \"acc_norm\": 0.41228070175438597,\n\ \ \"acc_norm_stderr\": 0.04630653203366595\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n\ \ \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.41005291005291006,\n \"acc_stderr\": 0.02533120243894442,\n \"\ acc_norm\": 0.41005291005291006,\n 
\"acc_norm_stderr\": 0.02533120243894442\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n\ \ \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.4126984126984127,\n\ \ \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \ \ \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n\ \ \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n\ \ \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n\ \ \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\"\ : 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.793939393939394,\n \"acc_stderr\": 0.03158415324047709,\n\ \ \"acc_norm\": 0.793939393939394,\n \"acc_norm_stderr\": 0.03158415324047709\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.803030303030303,\n \"acc_stderr\": 0.028335609732463355,\n \"\ acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463355\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768783,\n\ \ \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768783\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6410256410256411,\n \"acc_stderr\": 0.02432173848460235,\n \ \ \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.02432173848460235\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3,\n \"acc_stderr\": 0.027940457136228416,\n \"acc_norm\"\ : 0.3,\n \"acc_norm_stderr\": 0.027940457136228416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\"\ : {\n \"acc\": 0.6596638655462185,\n \"acc_stderr\": 0.030778057422931673,\n\ \ \"acc_norm\": 0.6596638655462185,\n \"acc_norm_stderr\": 0.030778057422931673\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.4304635761589404,\n \"acc_stderr\": 0.04042809961395634,\n \"\ acc_norm\": 0.4304635761589404,\n \"acc_norm_stderr\": 0.04042809961395634\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163255,\n \"\ acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163255\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.48148148148148145,\n \"acc_stderr\": 0.03407632093854052,\n \"\ acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.03407632093854052\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250458,\n \"\ acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250458\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.8438818565400844,\n \"acc_stderr\": 0.02362715946031867,\n \ \ \"acc_norm\": 0.8438818565400844,\n \"acc_norm_stderr\": 0.02362715946031867\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.726457399103139,\n\ \ \"acc_stderr\": 0.02991858670779883,\n \"acc_norm\": 0.726457399103139,\n\ \ \"acc_norm_stderr\": 0.02991858670779883\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7099236641221374,\n \"acc_stderr\": 0.039800662464677665,\n\ \ \"acc_norm\": 0.7099236641221374,\n \"acc_norm_stderr\": 0.039800662464677665\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 
0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"\ acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n\ \ \"acc_stderr\": 0.036809181416738807,\n \"acc_norm\": 0.8240740740740741,\n\ \ \"acc_norm_stderr\": 0.036809181416738807\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n\ \ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.48214285714285715,\n\ \ \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.48214285714285715,\n\ \ \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n\ \ \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n\ \ \"acc_stderr\": 0.021586494001281372,\n \"acc_norm\": 0.8760683760683761,\n\ \ \"acc_norm_stderr\": 0.021586494001281372\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.65,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n\ \ \"acc_stderr\": 0.013507943909371798,\n \"acc_norm\": 0.8275862068965517,\n\ \ \"acc_norm_stderr\": 0.013507943909371798\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.02425790170532338,\n\ \ \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.02425790170532338\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39329608938547483,\n\ \ \"acc_stderr\": 0.01633726869427011,\n \"acc_norm\": 0.39329608938547483,\n\ \ \"acc_norm_stderr\": 
0.01633726869427011\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6993464052287581,\n \"acc_stderr\": 0.026256053835718968,\n\ \ \"acc_norm\": 0.6993464052287581,\n \"acc_norm_stderr\": 0.026256053835718968\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n\ \ \"acc_stderr\": 0.02592237178881877,\n \"acc_norm\": 0.7041800643086816,\n\ \ \"acc_norm_stderr\": 0.02592237178881877\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7098765432098766,\n \"acc_stderr\": 0.025251173936495036,\n\ \ \"acc_norm\": 0.7098765432098766,\n \"acc_norm_stderr\": 0.025251173936495036\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \ \ \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4791395045632334,\n\ \ \"acc_stderr\": 0.01275911706651802,\n \"acc_norm\": 0.4791395045632334,\n\ \ \"acc_norm_stderr\": 0.01275911706651802\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5772058823529411,\n \"acc_stderr\": 0.030008562845003476,\n\ \ \"acc_norm\": 0.5772058823529411,\n \"acc_norm_stderr\": 0.030008562845003476\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6666666666666666,\n \"acc_stderr\": 0.0190709855896875,\n \ \ \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.0190709855896875\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n\ \ \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n\ \ \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7877551020408163,\n \"acc_stderr\": 0.026176967197866767,\n\ \ \"acc_norm\": 0.7877551020408163,\n \"acc_norm_stderr\": 0.026176967197866767\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 
0.8706467661691543,\n\ \ \"acc_stderr\": 0.023729830881018526,\n \"acc_norm\": 0.8706467661691543,\n\ \ \"acc_norm_stderr\": 0.023729830881018526\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \ \ \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\ \ \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n\ \ \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n\ \ \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3561811505507956,\n\ \ \"mc1_stderr\": 0.016763790728446335,\n \"mc2\": 0.527991738544026,\n\ \ \"mc2_stderr\": 0.015530613367021443\n }\n}\n```" repo_url: https://huggingface.co/ValiantLabs/ShiningValiant leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|arc:challenge|25_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|arc:challenge|25_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hellaswag|10_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hellaswag|10_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:23:20.160582.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:23:20.160582.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:23:20.160582.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-computer_security|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T22-26-27.274686.parquet' - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-22T22-26-27.274686.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-business_ethics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 
2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-formal_logic|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-22T22-26-27.274686.parquet' - config_name: 
harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-22T22-26-27.274686.parquet' - config_name: 
harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - 
'**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-22T22-26-27.274686.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-management|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:23:20.160582.parquet' - split: 
2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-22T22-26-27.274686.parquet' - config_name: 
harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-22T22-26-27.274686.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T11_23_20.160582 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T11:23:20.160582.parquet' - split: 2023_09_22T22_26_27.274686 path: - '**/details_harness|truthfulqa:mc|0_2023-09-22T22-26-27.274686.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-22T22-26-27.274686.parquet' - config_name: results data_files: - split: 2023_09_02T11_23_20.160582 path: - results_2023-09-02T11:23:20.160582.parquet - split: 2023_09_22T22_26_27.274686 path: - results_2023-09-22T22-26-27.274686.parquet - split: latest path: - results_2023-09-22T22-26-27.274686.parquet --- # Dataset Card for Evaluation run of ValiantLabs/ShiningValiant ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/ValiantLabs/ShiningValiant - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [ValiantLabs/ShiningValiant](https://huggingface.co/ValiantLabs/ShiningValiant) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. 
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_ValiantLabs__ShiningValiant", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-22T22:26:27.274686](https://huggingface.co/datasets/open-llm-leaderboard/details_ValiantLabs__ShiningValiant/blob/main/results_2023-09-22T22-26-27.274686.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6386983068475005, "acc_stderr": 0.032863621226889406, "acc_norm": 0.6425916297913504, "acc_norm_stderr": 0.03283781788418258, "mc1": 0.3561811505507956, "mc1_stderr": 0.016763790728446335, "mc2": 0.527991738544026, "mc2_stderr": 0.015530613367021443 }, "harness|arc:challenge|25": { "acc": 0.6049488054607508, "acc_stderr": 0.01428589829293817, "acc_norm": 0.6450511945392492, "acc_norm_stderr": 0.013983036904094087 }, "harness|hellaswag|10": { "acc": 0.6690898227444733, "acc_stderr": 0.004695791340502876, "acc_norm": 0.8586934873531169, "acc_norm_stderr": 0.003476255509644533 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5185185185185185, "acc_stderr": 0.043163785995113245, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7302631578947368, "acc_stderr": 0.03611780560284898, "acc_norm": 0.7302631578947368, "acc_norm_stderr": 0.03611780560284898 
}, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6377358490566037, "acc_stderr": 0.029582245128384303, "acc_norm": 0.6377358490566037, "acc_norm_stderr": 0.029582245128384303 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.59, "acc_stderr": 0.04943110704237101, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237101 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6069364161849711, "acc_stderr": 0.0372424959581773, "acc_norm": 0.6069364161849711, "acc_norm_stderr": 0.0372424959581773 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04690650298201943, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04690650298201943 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.032232762667117124, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.032232762667117124 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.41228070175438597, "acc_stderr": 0.04630653203366595, "acc_norm": 0.41228070175438597, "acc_norm_stderr": 0.04630653203366595 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, 
"acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.02533120243894442, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.02533120243894442 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768176, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768176 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.793939393939394, "acc_stderr": 0.03158415324047709, "acc_norm": 0.793939393939394, "acc_norm_stderr": 0.03158415324047709 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463355, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463355 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768783, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768783 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.02432173848460235, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.02432173848460235 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3, "acc_stderr": 0.027940457136228416, "acc_norm": 0.3, 
"acc_norm_stderr": 0.027940457136228416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6596638655462185, "acc_stderr": 0.030778057422931673, "acc_norm": 0.6596638655462185, "acc_norm_stderr": 0.030778057422931673 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.4304635761589404, "acc_stderr": 0.04042809961395634, "acc_norm": 0.4304635761589404, "acc_norm_stderr": 0.04042809961395634 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163255, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163255 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.03407632093854052, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.03407632093854052 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.024857478080250458, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.024857478080250458 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8438818565400844, "acc_stderr": 0.02362715946031867, "acc_norm": 0.8438818565400844, "acc_norm_stderr": 0.02362715946031867 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.726457399103139, "acc_stderr": 0.02991858670779883, "acc_norm": 0.726457399103139, "acc_norm_stderr": 0.02991858670779883 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7099236641221374, "acc_stderr": 0.039800662464677665, "acc_norm": 0.7099236641221374, "acc_norm_stderr": 0.039800662464677665 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.036809181416738807, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.036809181416738807 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 
0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.48214285714285715, "acc_stderr": 0.047427623612430116, "acc_norm": 0.48214285714285715, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822584, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281372, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 0.047937248544110196, "acc_norm": 0.65, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371798, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371798 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7167630057803468, "acc_stderr": 0.02425790170532338, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.02425790170532338 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39329608938547483, "acc_stderr": 0.01633726869427011, "acc_norm": 0.39329608938547483, "acc_norm_stderr": 0.01633726869427011 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6993464052287581, "acc_stderr": 0.026256053835718968, "acc_norm": 0.6993464052287581, "acc_norm_stderr": 0.026256053835718968 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.02592237178881877, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.02592237178881877 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7098765432098766, "acc_stderr": 0.025251173936495036, "acc_norm": 0.7098765432098766, "acc_norm_stderr": 0.025251173936495036 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 
0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4791395045632334, "acc_stderr": 0.01275911706651802, "acc_norm": 0.4791395045632334, "acc_norm_stderr": 0.01275911706651802 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5772058823529411, "acc_stderr": 0.030008562845003476, "acc_norm": 0.5772058823529411, "acc_norm_stderr": 0.030008562845003476 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.0190709855896875, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.0190709855896875 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7877551020408163, "acc_stderr": 0.026176967197866767, "acc_norm": 0.7877551020408163, "acc_norm_stderr": 0.026176967197866767 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8706467661691543, "acc_stderr": 0.023729830881018526, "acc_norm": 0.8706467661691543, "acc_norm_stderr": 0.023729830881018526 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.3561811505507956, "mc1_stderr": 0.016763790728446335, "mc2": 0.527991738544026, "mc2_stderr": 0.015530613367021443 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information 
Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
Yakonrus/testing
2023-09-02T11:31:29.000Z
[ "region:us" ]
Yakonrus
null
null
null
0
0
Entry not found
open-llm-leaderboard/details_robowaifudev__megatron-gpt2-345m
2023-09-17T04:50:07.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of robowaifudev/megatron-gpt2-345m dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [robowaifudev/megatron-gpt2-345m](https://huggingface.co/robowaifudev/megatron-gpt2-345m)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_robowaifudev__megatron-gpt2-345m\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-09-17T04:49:55.562605](https://huggingface.co/datasets/open-llm-leaderboard/details_robowaifudev__megatron-gpt2-345m/blob/main/results_2023-09-17T04-49-55.562605.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0019924496644295304,\n\ \ \"em_stderr\": 0.00045666764626669577,\n \"f1\": 0.05044882550335569,\n\ \ \"f1_stderr\": 0.0012876675201095459,\n \"acc\": 0.26593596234501155,\n\ \ \"acc_stderr\": 0.007670000821353762\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.0019924496644295304,\n \"em_stderr\": 0.00045666764626669577,\n\ \ \"f1\": 0.05044882550335569,\n \"f1_stderr\": 0.0012876675201095459\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.002274450341167551,\n \ \ \"acc_stderr\": 0.0013121578148674363\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.5295974743488555,\n \"acc_stderr\": 0.014027843827840086\n\ \ }\n}\n```" repo_url: https://huggingface.co/robowaifudev/megatron-gpt2-345m leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|arc:challenge|25_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T11:46:58.393121.parquet' - config_name: harness_drop_3 data_files: - split: 2023_09_17T04_49_55.562605 path: - '**/details_harness|drop|3_2023-09-17T04-49-55.562605.parquet' - split: latest path: - '**/details_harness|drop|3_2023-09-17T04-49-55.562605.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_09_17T04_49_55.562605 path: - '**/details_harness|gsm8k|5_2023-09-17T04-49-55.562605.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-09-17T04-49-55.562605.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hellaswag|10_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 
2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:46:58.393121.parquet' - 
'**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:46:58.393121.parquet' - 
'**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:46:58.393121.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:46:58.393121.parquet' - 
'**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:46:58.393121.parquet' - 
'**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:46:58.393121.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - 
split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:46:58.393121.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T11:46:58.393121.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T11_46_58.393121 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T11:46:58.393121.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T11:46:58.393121.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_09_17T04_49_55.562605 path: - '**/details_harness|winogrande|5_2023-09-17T04-49-55.562605.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-09-17T04-49-55.562605.parquet' - config_name: results data_files: - split: 2023_09_02T11_46_58.393121 path: - results_2023-09-02T11:46:58.393121.parquet - split: 2023_09_17T04_49_55.562605 path: - results_2023-09-17T04-49-55.562605.parquet - split: latest path: - results_2023-09-17T04-49-55.562605.parquet --- # Dataset Card for Evaluation run of robowaifudev/megatron-gpt2-345m ## Dataset Description - 
**Homepage:** - **Repository:** https://huggingface.co/robowaifudev/megatron-gpt2-345m - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [robowaifudev/megatron-gpt2-345m](https://huggingface.co/robowaifudev/megatron-gpt2-345m) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the agregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_robowaifudev__megatron-gpt2-345m", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-09-17T04:49:55.562605](https://huggingface.co/datasets/open-llm-leaderboard/details_robowaifudev__megatron-gpt2-345m/blob/main/results_2023-09-17T04-49-55.562605.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0019924496644295304, "em_stderr": 0.00045666764626669577, "f1": 0.05044882550335569, "f1_stderr": 0.0012876675201095459, "acc": 0.26593596234501155, "acc_stderr": 0.007670000821353762 }, "harness|drop|3": { "em": 0.0019924496644295304, "em_stderr": 0.00045666764626669577, "f1": 0.05044882550335569, "f1_stderr": 0.0012876675201095459 }, "harness|gsm8k|5": { "acc": 0.002274450341167551, "acc_stderr": 0.0013121578148674363 }, "harness|winogrande|5": { "acc": 0.5295974743488555, "acc_stderr": 0.014027843827840086 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
Niankaphell/Arsenium-Voice
2023-09-02T11:48:53.000Z
[ "license:openrail", "region:us" ]
Niankaphell
null
null
null
0
0
--- license: openrail ---
vantagedashcam/vantagedashcam
2023-09-02T11:52:29.000Z
[ "region:us" ]
vantagedashcam
null
null
null
0
0
Introduction ============ In today's fast-paced world, technological advancements have revolutionized various aspects of our lives, including the way we drive. One such innovation that has gained immense popularity among drivers is the **Vantage DashCam**. This article will delve into the world of Vantage DashCams, highlighting their benefits, features, and how they can enhance your driving experience. So, fasten your seatbelts as we embark on a journey to discover the incredible advantages of this cutting-edge device. [Click Here Get From Official Website](https://www.glitco.com/get-vantage-dashcam) What Is a Vantage DashCam? -------------------------- ### Understanding the Basics Before we dive into the advantages, let's get a clear understanding of what a Vantage DashCam is. Essentially, it's a compact video camera designed to be mounted on your car's dashboard or windshield. Its primary purpose is to record everything that happens on the road while you're driving, providing you with a visual record of your journey. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgoFaoMMaazxobj5Tbh6ufR69haaj8dl4whMPXBoXqbOE5BYs2XJTqBnB3A1zEFOPOMsufxce4KBx0pTQ_pgp0JGZD-vOGt_ULzPtXl9JtRkCbkPU4f8U66ByR1Dx_eQQVqZLE_YKRWMIPOH9fo6CaChOaESq0ZFeEKLJhf94fvn4zKiZp2QPlRP3iJMnU/w640-h398/Screenshot%20(1093).png)](https://www.glitco.com/get-vantage-dashcam) The Benefits of Owning a Vantage DashCam ---------------------------------------- ### 1\. **Enhanced Safety** Safety is a paramount concern for every driver. A Vantage DashCam acts as your silent witness on the road. It records accidents, collisions, and even acts of vandalism, offering crucial evidence in case of disputes or insurance claims. This added layer of protection can give you peace of mind every time you hit the road. ### 2\. **Preventing Insurance Fraud** Insurance fraud is an unfortunate reality. With a Vantage DashCam, you can thwart any fraudulent claims by providing concrete video evidence of accidents. 
This not only safeguards your interests but also contributes to reducing insurance fraud overall. ### 3\. **Monitoring Teenage Drivers** If you have teenage drivers in your household, a Vantage DashCam can help you monitor their driving habits. You can review their trips, ensuring they drive safely and responsibly, and have constructive discussions based on the footage. ### 4\. **Capturing Memorable Road Trips** On a lighter note, Vantage DashCams can capture your memorable road trips. Relive your adventures and scenic drives by watching the footage later, and share your experiences with friends and family. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiDaqYe1pJ25JYPWeuZoDGIDSiO55nTo_oG94WCzr4A3-zgP6HJF82HMThiyLVIxW298NXoNNJ_QYmcPJsJ8pF6rscwnIytUi02X8slRAPVt0yCBJs4QBa8zy0y6_4PCgcEPPyA-mQEytIXtIX9Cgn8dpjt2W9mrCTAzpQscTK1UqpgL6Oyo3YsoKVJhAk/w640-h512/Screenshot%20(1095).png)](https://www.glitco.com/get-vantage-dashcam) How Vantage DashCams Work ------------------------- ### 5\. **Continuous Loop Recording** Vantage DashCams typically use continuous loop recording. This means that once the storage is full, it automatically overwrites the oldest footage, ensuring you never run out of recording space. ### 6\. **G-Sensor Technology** Many Vantage DashCams are equipped with G-sensor technology. In the event of a collision, sudden stop, or impact, the sensor triggers the camera to save the current recording as a locked file, preventing it from being overwritten. [Click Here Get From Official Website](https://www.glitco.com/get-vantage-dashcam) Choosing the Right Vantage DashCam ---------------------------------- ### 7\. **Consider Your Budget** Vantage DashCams come in various price ranges. It's essential to determine your budget and choose a model that offers the features you need without breaking the bank. ### 8\. **Video Quality** The video quality of DashCams can vary significantly. 
Opt for a model with at least 1080p resolution for clear and crisp recordings. ### 9\. **Field of View** The field of view determines how much of the road is captured. A wider field of view is ideal for capturing more details. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgM_svVBKtwXQCy2bTiDfu6PP9yqet5G5z9ULGW9IXbUzBqHmkkM9EItSATeaLzUimfCFuHSSUXnU4WHRPuPmB7eN9HWICP_RO-DrIIiBCvb4nU8KLigi4tNreF8CKyWlr2uOCaVcAZktcHFkHGaMwW_fb6bI9p7y0dwWIcl2u7BJGDva18mZzWDXqSy-s/w640-h280/Screenshot%20(1096).png)](https://www.glitco.com/get-vantage-dashcam) Installing Your Vantage DashCam ------------------------------- ### 10\. **Mounting Options** Most DashCams can be mounted on the windshield or dashboard using adhesive mounts or suction cups. Ensure proper placement for an unobstructed view of the road. ### 11\. **Wiring and Power** Consider the power source for your DashCam. Some models are hardwired to your car's electrical system, while others use a cigarette lighter adapter. Conclusion ---------- In conclusion, a Vantage DashCam is more than just a gadget; it's your trusted road companion. It enhances safety, prevents fraud, and allows you to capture unforgettable moments. Before you hit the road, consider investing in a Vantage DashCam to enjoy these benefits and drive with confidence. [Click Here Get From Official Website](https://www.glitco.com/get-vantage-dashcam) FAQs (Frequently Asked Questions) --------------------------------- ### 1\. Is a Vantage DashCam easy to install? Yes, most Vantage DashCams come with straightforward installation instructions and can be easily mounted in your car. ### 2\. Can I use a Vantage DashCam in all weather conditions? Yes, Vantage DashCams are designed to withstand various weather conditions, ensuring they function optimally regardless of the climate. ### 3\. How much storage do I need for a Vantage DashCam? The required storage capacity depends on your recording preferences. 
However, DashCams often come with expandable storage options to suit your needs. ### 4\. Are Vantage DashCams legal to use? In most regions, Vantage DashCams are legal to use for personal purposes. However, it's essential to check your local laws and regulations. ### 5\. Can I view DashCam footage on my smartphone? Yes, many Vantage DashCams offer smartphone connectivity, allowing you to view and manage your recordings conveniently. [![](https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEgXWeNd5FFO-0yYd1oll8JZuscuPNjvcsx2U5h74SzNe3USAhQg69HaNmvma8pXsVLCB-dfUR9yPiDM6wtyNm28nLkgxKaTJYW8kURCzY3y_OVve7j6DqnK97Wt55qsPmTSq4L6vAkcyGb4xyt7OGki8rXW-hy7w8QVeNw8jF69PAZvA0JXZah-hUlLlz8/w640-h392/Screenshot%20(1094).png)](https://www.glitco.com/get-vantage-dashcam) [Click Here Get From Official Website](https://www.glitco.com/get-vantage-dashcam)
open-llm-leaderboard/details_gpt2-xl
2023-09-02T12:02:49.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of gpt2-xl dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [gpt2-xl](https://huggingface.co/gpt2-xl) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gpt2-xl\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T12:01:37.784135](https://huggingface.co/datasets/open-llm-leaderboard/details_gpt2-xl/blob/main/results_2023-09-02T12%3A01%3A37.784135.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2665161542962616,\n\ \ \"acc_stderr\": 0.03209524333958264,\n \"acc_norm\": 0.2692149570006133,\n\ \ \"acc_norm_stderr\": 0.032107492839961255,\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.0145090451714873,\n \"mc2\": 0.3853664594422386,\n\ \ \"mc2_stderr\": 0.014057369285913092\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2593856655290102,\n \"acc_stderr\": 0.012808273573927104,\n\ \ \"acc_norm\": 0.302901023890785,\n \"acc_norm_stderr\": 0.013428241573185347\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3981278629755029,\n\ \ \"acc_stderr\": 0.004885116465550275,\n \"acc_norm\": 0.5138418641704839,\n\ \ \"acc_norm_stderr\": 0.004987868988630006\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2074074074074074,\n\ \ \"acc_stderr\": 0.03502553170678318,\n \"acc_norm\": 0.2074074074074074,\n\ \ \"acc_norm_stderr\": 0.03502553170678318\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.031975658210324984,\n\ \ \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.031975658210324984\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.27,\n\ \ \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.27,\n \ \ \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.29056603773584905,\n \"acc_stderr\": 0.027943219989337145,\n\ \ \"acc_norm\": 0.29056603773584905,\n \"acc_norm_stderr\": 0.027943219989337145\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2847222222222222,\n\ \ \"acc_stderr\": 0.03773809990686935,\n \"acc_norm\": 0.2847222222222222,\n\ \ \"acc_norm_stderr\": 
0.03773809990686935\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n\ \ \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3063583815028902,\n\ \ \"acc_stderr\": 0.03514942551267437,\n \"acc_norm\": 0.3063583815028902,\n\ \ \"acc_norm_stderr\": 0.03514942551267437\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.03873958714149351,\n\ \ \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.03873958714149351\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.029896145682095455,\n\ \ \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.029896145682095455\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2620689655172414,\n \"acc_stderr\": 0.03664666337225256,\n\ \ \"acc_norm\": 0.2620689655172414,\n \"acc_norm_stderr\": 0.03664666337225256\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24074074074074073,\n \"acc_stderr\": 0.0220190800122179,\n \"\ acc_norm\": 
0.24074074074074073,\n \"acc_norm_stderr\": 0.0220190800122179\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3412698412698413,\n\ \ \"acc_stderr\": 0.04240799327574925,\n \"acc_norm\": 0.3412698412698413,\n\ \ \"acc_norm_stderr\": 0.04240799327574925\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24838709677419354,\n\ \ \"acc_stderr\": 0.024580028921481003,\n \"acc_norm\": 0.24838709677419354,\n\ \ \"acc_norm_stderr\": 0.024580028921481003\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.21182266009852216,\n \"acc_stderr\": 0.028748983689941054,\n\ \ \"acc_norm\": 0.21182266009852216,\n \"acc_norm_stderr\": 0.028748983689941054\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\"\ : 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.036085410115739666,\n\ \ \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.036085410115739666\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.2777777777777778,\n \"acc_stderr\": 0.03191178226713548,\n \"\ acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.03191178226713548\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.02977866303775296,\n\ \ \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.02977866303775296\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.3435897435897436,\n \"acc_stderr\": 0.024078696580635474,\n\ \ \"acc_norm\": 0.3435897435897436,\n \"acc_norm_stderr\": 0.024078696580635474\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.25555555555555554,\n \"acc_stderr\": 0.026593939101844065,\n \ \ \"acc_norm\": 0.25555555555555554,\n \"acc_norm_stderr\": 0.026593939101844065\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.22268907563025211,\n \"acc_stderr\": 0.02702543349888239,\n\ \ \"acc_norm\": 0.22268907563025211,\n \"acc_norm_stderr\": 0.02702543349888239\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.26490066225165565,\n \"acc_stderr\": 0.03603038545360384,\n \"\ acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.03603038545360384\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.3284403669724771,\n \"acc_stderr\": 0.020135902797298388,\n \"\ acc_norm\": 0.3284403669724771,\n \"acc_norm_stderr\": 0.020135902797298388\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"\ acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.22549019607843138,\n \"acc_stderr\": 0.029331162294251714,\n \"\ acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.029331162294251714\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.2489451476793249,\n \"acc_stderr\": 0.028146970599422644,\n \ \ \"acc_norm\": 0.2489451476793249,\n \"acc_norm_stderr\": 0.028146970599422644\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.18834080717488788,\n\ \ \"acc_stderr\": 0.026241132996407266,\n \"acc_norm\": 0.18834080717488788,\n\ \ \"acc_norm_stderr\": 0.026241132996407266\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.038073871163060866,\n\ \ \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.038073871163060866\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2809917355371901,\n \"acc_stderr\": 0.04103203830514512,\n \"\ acc_norm\": 0.2809917355371901,\n \"acc_norm_stderr\": 0.04103203830514512\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.2962962962962963,\n\ \ \"acc_stderr\": 0.044143436668549335,\n \"acc_norm\": 0.2962962962962963,\n\ \ \"acc_norm_stderr\": 0.044143436668549335\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.03351953879521269,\n\ \ \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.03351953879521269\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n\ \ \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n\ \ \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.2621359223300971,\n \"acc_stderr\": 0.04354631077260597,\n\ \ \"acc_norm\": 0.2621359223300971,\n \"acc_norm_stderr\": 0.04354631077260597\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.26495726495726496,\n\ \ \"acc_stderr\": 0.02891120880274948,\n \"acc_norm\": 0.26495726495726496,\n\ \ \"acc_norm_stderr\": 0.02891120880274948\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2681992337164751,\n\ \ \"acc_stderr\": 0.015842430835269445,\n \"acc_norm\": 0.2681992337164751,\n\ \ \"acc_norm_stderr\": 0.015842430835269445\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.2398843930635838,\n \"acc_stderr\": 0.02298959254312357,\n\ \ \"acc_norm\": 0.2398843930635838,\n \"acc_norm_stderr\": 0.02298959254312357\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n\ \ \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 
0.2424581005586592,\n\ \ \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.28104575163398693,\n \"acc_stderr\": 0.02573885479781873,\n\ \ \"acc_norm\": 0.28104575163398693,\n \"acc_norm_stderr\": 0.02573885479781873\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.21221864951768488,\n\ \ \"acc_stderr\": 0.023222756797435122,\n \"acc_norm\": 0.21221864951768488,\n\ \ \"acc_norm_stderr\": 0.023222756797435122\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.24382716049382716,\n \"acc_stderr\": 0.023891879541959614,\n\ \ \"acc_norm\": 0.24382716049382716,\n \"acc_norm_stderr\": 0.023891879541959614\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432407,\n \ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432407\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25945241199478486,\n\ \ \"acc_stderr\": 0.011195262076350316,\n \"acc_norm\": 0.25945241199478486,\n\ \ \"acc_norm_stderr\": 0.011195262076350316\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.20955882352941177,\n \"acc_stderr\": 0.02472311040767705,\n\ \ \"acc_norm\": 0.20955882352941177,\n \"acc_norm_stderr\": 0.02472311040767705\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.24019607843137256,\n \"acc_stderr\": 0.017282760695167432,\n \ \ \"acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.017282760695167432\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3181818181818182,\n\ \ \"acc_stderr\": 0.04461272175910508,\n \"acc_norm\": 0.3181818181818182,\n\ \ \"acc_norm_stderr\": 0.04461272175910508\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.2897959183673469,\n \"acc_stderr\": 0.02904308868330435,\n\ \ \"acc_norm\": 0.2897959183673469,\n \"acc_norm_stderr\": 0.02904308868330435\n\ \ 
},\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n\ \ \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.24875621890547264,\n\ \ \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \ \ \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3373493975903614,\n\ \ \"acc_stderr\": 0.03680783690727581,\n \"acc_norm\": 0.3373493975903614,\n\ \ \"acc_norm_stderr\": 0.03680783690727581\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.32748538011695905,\n \"acc_stderr\": 0.03599335771456027,\n\ \ \"acc_norm\": 0.32748538011695905,\n \"acc_norm_stderr\": 0.03599335771456027\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.22031823745410037,\n\ \ \"mc1_stderr\": 0.0145090451714873,\n \"mc2\": 0.3853664594422386,\n\ \ \"mc2_stderr\": 0.014057369285913092\n }\n}\n```" repo_url: https://huggingface.co/gpt2-xl leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|arc:challenge|25_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hellaswag|10_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-astronomy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-nutrition|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T12:01:37.784135.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T12:01:37.784135.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T12:01:37.784135.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T12:01:37.784135.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T12_01_37.784135 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T12:01:37.784135.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T12:01:37.784135.parquet' - config_name: results data_files: - split: 2023_09_02T12_01_37.784135 path: - results_2023-09-02T12:01:37.784135.parquet - split: latest path: - results_2023-09-02T12:01:37.784135.parquet --- # Dataset Card for Evaluation run of gpt2-xl ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/gpt2-xl - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [gpt2-xl](https://huggingface.co/gpt2-xl) on the [Open LLM 
Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_gpt2-xl", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T12:01:37.784135](https://huggingface.co/datasets/open-llm-leaderboard/details_gpt2-xl/blob/main/results_2023-09-02T12%3A01%3A37.784135.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2665161542962616, "acc_stderr": 0.03209524333958264, "acc_norm": 0.2692149570006133, "acc_norm_stderr": 0.032107492839961255, "mc1": 0.22031823745410037, "mc1_stderr": 0.0145090451714873, "mc2": 0.3853664594422386, "mc2_stderr": 0.014057369285913092 }, "harness|arc:challenge|25": { "acc": 0.2593856655290102, "acc_stderr": 0.012808273573927104, "acc_norm": 0.302901023890785, "acc_norm_stderr": 0.013428241573185347 }, "harness|hellaswag|10": { "acc": 0.3981278629755029, "acc_stderr": 0.004885116465550275, "acc_norm": 0.5138418641704839, "acc_norm_stderr": 0.004987868988630006 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2074074074074074, "acc_stderr": 0.03502553170678318, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.03502553170678318 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210324984, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210324984 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.29056603773584905, "acc_stderr": 0.027943219989337145, "acc_norm": 0.29056603773584905, "acc_norm_stderr": 0.027943219989337145 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686935, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686935 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, 
"acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3063583815028902, "acc_stderr": 0.03514942551267437, "acc_norm": 0.3063583815028902, "acc_norm_stderr": 0.03514942551267437 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149351, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149351 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2978723404255319, "acc_stderr": 0.029896145682095455, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.029896145682095455 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2620689655172414, "acc_stderr": 0.03664666337225256, "acc_norm": 0.2620689655172414, "acc_norm_stderr": 0.03664666337225256 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24074074074074073, "acc_stderr": 0.0220190800122179, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.0220190800122179 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3412698412698413, "acc_stderr": 0.04240799327574925, "acc_norm": 0.3412698412698413, "acc_norm_stderr": 0.04240799327574925 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24838709677419354, "acc_stderr": 0.024580028921481003, "acc_norm": 0.24838709677419354, "acc_norm_stderr": 0.024580028921481003 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21182266009852216, "acc_stderr": 0.028748983689941054, "acc_norm": 0.21182266009852216, "acc_norm_stderr": 0.028748983689941054 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03191178226713548, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03191178226713548 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.02977866303775296, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.02977866303775296 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3435897435897436, "acc_stderr": 0.024078696580635474, "acc_norm": 0.3435897435897436, "acc_norm_stderr": 0.024078696580635474 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25555555555555554, "acc_stderr": 0.026593939101844065, "acc_norm": 0.25555555555555554, "acc_norm_stderr": 0.026593939101844065 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.22268907563025211, "acc_stderr": 0.02702543349888239, "acc_norm": 0.22268907563025211, "acc_norm_stderr": 0.02702543349888239 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.26490066225165565, "acc_stderr": 0.03603038545360384, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.03603038545360384 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3284403669724771, "acc_stderr": 0.020135902797298388, "acc_norm": 0.3284403669724771, "acc_norm_stderr": 0.020135902797298388 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39351851851851855, "acc_stderr": 
0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22549019607843138, "acc_stderr": 0.029331162294251714, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.029331162294251714 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2489451476793249, "acc_stderr": 0.028146970599422644, "acc_norm": 0.2489451476793249, "acc_norm_stderr": 0.028146970599422644 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.18834080717488788, "acc_stderr": 0.026241132996407266, "acc_norm": 0.18834080717488788, "acc_norm_stderr": 0.026241132996407266 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.038073871163060866, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.038073871163060866 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2809917355371901, "acc_stderr": 0.04103203830514512, "acc_norm": 0.2809917355371901, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.2962962962962963, "acc_stderr": 0.044143436668549335, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.044143436668549335 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.03351953879521269, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.03351953879521269 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.2621359223300971, "acc_stderr": 0.04354631077260597, "acc_norm": 0.2621359223300971, "acc_norm_stderr": 0.04354631077260597 }, "harness|hendrycksTest-marketing|5": { "acc": 0.26495726495726496, "acc_stderr": 0.02891120880274948, "acc_norm": 0.26495726495726496, "acc_norm_stderr": 0.02891120880274948 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.25, 
"acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2681992337164751, "acc_stderr": 0.015842430835269445, "acc_norm": 0.2681992337164751, "acc_norm_stderr": 0.015842430835269445 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2398843930635838, "acc_stderr": 0.02298959254312357, "acc_norm": 0.2398843930635838, "acc_norm_stderr": 0.02298959254312357 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.28104575163398693, "acc_stderr": 0.02573885479781873, "acc_norm": 0.28104575163398693, "acc_norm_stderr": 0.02573885479781873 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.21221864951768488, "acc_stderr": 0.023222756797435122, "acc_norm": 0.21221864951768488, "acc_norm_stderr": 0.023222756797435122 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24382716049382716, "acc_stderr": 0.023891879541959614, "acc_norm": 0.24382716049382716, "acc_norm_stderr": 0.023891879541959614 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432407, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432407 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25945241199478486, "acc_stderr": 0.011195262076350316, "acc_norm": 0.25945241199478486, "acc_norm_stderr": 0.011195262076350316 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20955882352941177, "acc_stderr": 0.02472311040767705, "acc_norm": 0.20955882352941177, "acc_norm_stderr": 0.02472311040767705 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.24019607843137256, "acc_stderr": 0.017282760695167432, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.017282760695167432 }, "harness|hendrycksTest-public_relations|5": { "acc": 
0.3181818181818182, "acc_stderr": 0.04461272175910508, "acc_norm": 0.3181818181818182, "acc_norm_stderr": 0.04461272175910508 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2897959183673469, "acc_stderr": 0.02904308868330435, "acc_norm": 0.2897959183673469, "acc_norm_stderr": 0.02904308868330435 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916714, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-virology|5": { "acc": 0.3373493975903614, "acc_stderr": 0.03680783690727581, "acc_norm": 0.3373493975903614, "acc_norm_stderr": 0.03680783690727581 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.32748538011695905, "acc_stderr": 0.03599335771456027, "acc_norm": 0.32748538011695905, "acc_norm_stderr": 0.03599335771456027 }, "harness|truthfulqa:mc|0": { "mc1": 0.22031823745410037, "mc1_stderr": 0.0145090451714873, "mc2": 0.3853664594422386, "mc2_stderr": 0.014057369285913092 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
baebee/SiLM-SChat
2023-09-02T12:10:16.000Z
[ "region:us" ]
baebee
null
null
null
0
0
Entry not found
polymath707/aseanllama2-instruct-without-emojis
2023-09-02T12:28:55.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
kingdsl/LOGO_TFM_3K
2023-09-02T12:37:24.000Z
[ "region:us" ]
kingdsl
null
null
null
0
0
Entry not found
DucHaiten/DucHaiten-realistic-SDXL
2023-09-04T14:18:38.000Z
[ "license:creativeml-openrail-m", "region:us" ]
DucHaiten
null
null
null
1
0
--- license: creativeml-openrail-m ---
vhtran/en-iden
2023-09-02T12:52:09.000Z
[ "license:cc-by-4.0", "region:us" ]
vhtran
null
null
null
0
0
--- license: cc-by-4.0 ---
AdithyaSK/CompanionLLama_instruction_30k
2023-09-02T13:10:17.000Z
[ "region:us" ]
AdithyaSK
null
null
null
0
0
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 18507344 num_examples: 29655 download_size: 9059972 dataset_size: 18507344 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "CompanionLLama_instruction_30k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
vhtran/en-en
2023-09-02T13:11:30.000Z
[ "license:cc-by-4.0", "region:us" ]
vhtran
null
null
null
0
0
--- license: cc-by-4.0 ---
AdithyaSK/CompanionLLama_Instruction_Memeory_30k
2023-09-02T13:15:48.000Z
[ "region:us" ]
AdithyaSK
null
null
null
0
0
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 62546915 num_examples: 29655 download_size: 13825217 dataset_size: 62546915 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "CompanionLLama_Instruction_Memeory_30k" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
polymath707/aseanllama2-handmade
2023-09-02T13:15:51.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
vivaprimemepills/Viva-Prime-Male-Enhancement
2023-09-02T13:33:06.000Z
[ "region:us" ]
vivaprimemepills
null
null
null
0
0
<h2 style="text-align: center;"><a style="color: black;" href="https://sale365day.com/get-viva-prime-me">Click Here -- Official Website -- Order Now</a></h2> <h2 style="text-align: center;"><span style="color: red;">⚠️Beware Of Fake Websites⚠️</span></h2> <p style="font-family: 'Open Sans', sans-serif; font-size: 16px; text-align: left;"><strong>✔For Order Official Website - <a href="https://sale365day.com/get-viva-prime-me">https://sale365day.com/get-viva-prime-me</a></strong></p> <p style="font-family: 'Open Sans', sans-serif; font-size: 16px; text-align: left;"><strong>✔Product Name - <a href="https://groups.google.com/g/viva-prime-male-enhancement-offer/c/zqv6GaJ7SSM">Viva Prime Male Enhancement</a><br /></strong></p> <p style="font-family: 'Open Sans', sans-serif; font-size: 16px; text-align: left;"><strong>✔Side Effect -&nbsp;No Side Effects<br /></strong></p> <p style="font-family: 'Open Sans', sans-serif; font-size: 16px; text-align: left;"><strong>✔Availability - <a href="https://sale365day.com/get-viva-prime-me">Online</a></strong></p> <p><strong>✔</strong><strong>Rating -⭐⭐⭐⭐⭐</strong></p> <p><a style="color: black;" href="https://sale365day.com/get-viva-prime-me"><span style="font-size: large;"><strong>Hurry Up - Limited Time Offer - Purchase Now</strong></span></a></p> <p><a style="color: black;" href="https://sale365day.com/get-viva-prime-me"><span style="font-size: large;"><strong>Hurry Up - Limited Time Offer - Purchase Now</strong></span></a></p> <p><a style="color: black;" href="https://sale365day.com/get-viva-prime-me"><span style="font-size: large;"><strong>Hurry Up - Limited Time Offer - Purchase Now</strong></span></a></p> <p>Viva Prime Male Enhancement is a product for natural stamina and virility. In this blog we will analyze about the effectiveness of this product. Viva Prime is a male booster that promises to increase stamina and virility naturally. 
If you are searching for ways to improve your physical and mental health, this review will provide you with valuable insight to help you make an informed decision.</p> <p><a href="https://groups.google.com/g/viva-prime-male-enhancement-offer/c/rVqFuLyG6W0">Viva Prime Male Enhancement</a> offers an array of compelling benefits that can profoundly impact your overall well-being. Its primary advantage lies in its ability to naturally enhance stamina, empowering you to conquer physical challenges with renewed vigor. Whether you're an athlete, fitness enthusiast, or someone striving for peak performance, Viva Prime can be an invaluable aid in achieving your goals.&nbsp;</p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://sale365day.com/get-viva-prime-me"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiNTVLhKpJg6GMgnTKrRSLVF5dxGE5sXN7QXstSfZxTZnCh017uBx4tHbg_HZX81X4Y4ZsCQhKQ2KJugznPJtlPWGvBRSxzV1prehBZtCGXTtR2Gu-Y1f0R6-_zSWgYLNdtsSMMoY_FXLbPBcd9CZRY_9IRU4FuZh_TJP7WBOSicGKcHTYRCqzxzg-C/w640-h340/oie_I7werrR9E4Mf.jpg" alt="" width="640" height="340" border="0" data-original-height="560" data-original-width="1054" /></a></div> <p>Moreover, this male booster claims to boost virility, which can positively influence your intimate relationships. Improved virility fosters confidence and satisfaction, contributing to a more fulfilling personal life. Additionally, Viva Prime's unique formulation is believed to extend its benefits to mental health, promoting focus, clarity, and motivation.</p> <p>By leveraging the power of carefully selected ingredients, <a href="https://groups.google.com/g/viva-prime-male-enhancement-offer">Viva Prime Male Enhancement</a> aims to provide a holistic approach to wellness. 
However, individual results may vary, making it essential to consult a healthcare professional before incorporating any supplement into your routine.</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>Viva Prime Male Enhancement Is On Sale Now For A Limited Time!</strong></span></a></u></p> <h2 style="text-align: left;"><strong>Claims Made by Viva Prime Male Enhancement Review</strong></h2> <p><a href="https://viva-prime-male-enhancement-official.jimdosite.com/">Viva Prime Male Enhancement</a> has garnered significant attention through its proponents' various claims. One primary assertion is its ability to deliver noticeable improvements in stamina and endurance levels. The male booster's proprietary blend of natural ingredients is specifically formulated to support sustained energy levels, allowing users to push their physical limits.</p> <p>Furthermore, the review highlights Viva Prime's promise to enhance virility, leading to improved intimate experiences. The supplement is said to work synergistically with the body's natural processes to boost libido and performance.</p> <p>Moreover, Viva Prime's unique composition is claimed to have a positive impact on mental health. Users are purportedly more focused, motivated, and mentally resilient, enabling them to overcome challenges with ease.</p> <p>While these claims may be enticing, it is essential to approach them with a discerning eye. Individual experiences can vary, and it is crucial to consider multiple sources of information and consult a healthcare professional before making any decisions regarding supplements.</p> <h2 style="text-align: left;"><strong>Understanding Viva Prime Male Enhancement</strong></h2> <p><a href="https://yourpillsboss.blogspot.com/2023/09/viva-prime-male-enhancement-2023.html">Viva Prime</a> is a male booster that aims to elevate stamina, virility, and overall well-being naturally. 
Its proprietary blend of carefully selected ingredients claims to offer a holistic approach to enhancing physical and mental performance.</p> <p>The primary focus of Viva Prime Male Enhancement is to support individuals in achieving peak stamina levels, enabling them to conquer physical challenges with increased endurance and vitality. By tapping into the body's natural processes, Viva Prime Male Enhancement seeks to provide sustainable energy throughout the day.</p> <p>Additionally, the male booster targets virility, with promises of heightened libido and improved intimate experiences. Viva Prime's formulation is designed to stimulate male vitality and confidence, fostering a sense of satisfaction in personal relationships.</p> <p>Understanding the mechanisms behind Viva Prime's functioning is crucial in making informed decisions about its potential benefits. By exploring its ingredients and their intended effects, users can gain insight into whether this supplement aligns with their specific health and wellness goals. As with any supplement, it is recommended to consult a healthcare professional to ensure its suitability for individual needs.&nbsp;</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>[ACT NOW] Don't miss out on this amazing offer!</strong></span></a></u></p> <h2 style="text-align: left;"><strong>User Reviews on Viva Prime Male Enhancement</strong></h2> <p>User reviews play a vital role in evaluating the effectiveness and real-world impact of any supplement, including Viva Prime Male Enhancement. While some individuals have reported positive experiences with the product, others have shared mixed opinions.</p> <p>Several users have praised Viva Prime for delivering noticeable improvements in stamina and endurance, enabling them to perform better in their physical activities. 
Additionally, some have expressed satisfaction with the supplement's purported impact on virility, citing enhanced intimate experiences.</p> <p>Conversely, there have been users who did not experience significant changes or results with Viva Prime. Factors such as individual body chemistry and lifestyle may influence the product's efficacy for each person.</p> <p>As with any supplement, it is crucial to consider a range of user reviews to form a balanced understanding of Viva Prime's effects. Consulting with a healthcare professional can also provide personalized insights and guidance for those interested in incorporating the product into their health and wellness routine.&nbsp;</p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://sale365day.com/get-viva-prime-me"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiU5tt7KbQivN8QWrZTP8meYbjXKfPMjywvHJEMdYKB-GZZdqnX667HkPowaDQ4dL0-SvGgZIq82a_XZWqR19teTmjnoW-AP-2wRRAUH7Zvw_KjkOM5dFt3ae7xAhLuGv1qlWd7V18JmrjhWS-SSo1on0qXoJfQvcOoAPOiSTt-i4-YZ_dtXUmdSGpO/w640-h358/qdqwdswdw.JPG" alt="" width="640" height="358" border="0" data-original-height="438" data-original-width="784" /></a></div> <h2 style="text-align: left;"><strong>Viva Prime Male Enhancement Side Effects and Precautions</strong></h2> <p>While Viva Prime Male Enhancement claims to be formulated with natural ingredients, it is essential to be aware of potential side effects and take necessary precautions. As with any supplement, individual reactions can vary, and some users may experience mild side effects.</p> <p>Commonly reported side effects include digestive discomfort, such as upset stomach or bloating, particularly during the initial stages of use. 
However, these effects are generally temporary and may subside as the body adjusts to the supplement.</p> <p>To minimize the risk of adverse reactions, it is advisable to follow the recommended dosage and usage guidelines provided by the manufacturer. Users with pre-existing medical conditions or those taking other medications should consult a healthcare professional before incorporating Viva Prime into their routine.</p> <p>Moreover, pregnant or nursing individuals should avoid using the supplement without medical approval. As with any dietary supplement, it is best to prioritize safety and seek professional advice when necessary to ensure a positive and safe experience with Viva Prime Male Enhancement.&nbsp;</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>Click Here to Get Viva Prime Male Enhancement Supplement At Discounted Price!!!</strong></span></a></u></p> <h2 style="text-align: left;"><strong>Comparing Viva Prime Male Enhancement with Other Similar Products</strong></h2> <p>When considering a male booster like <a href="https://infogram.com/viva-prime-male-enhancement-2023-update-can-it-actually-help-regulate-blood-flow-or-is-it-just-hype-1hdw2jpo5d8xp2l?live">Viva Prime Male Enhancement</a>, it's essential to assess how it stacks up against other similar products in the market. Several supplements claim to offer stamina enhancement and virility support, making it crucial to make informed comparisons.</p> <p>Viva Prime sets itself apart with its emphasis on natural ingredients and its unique proprietary blend. 
While some products may rely on synthetic compounds, Viva Prime Male Enhancement seeks to harness the power of nature to boost performance.</p> <p>Moreover, unlike certain products that solely target physical aspects, Viva Prime Male Enhancement also addresses mental well-being, aiming to provide a comprehensive approach to overall vitality.</p> <p>To make an informed decision, potential users should evaluate the ingredients, user reviews, and overall reputation of each product. Individual preferences and health requirements should guide the choice of the most suitable supplement.</p> <p>As with any supplement, it is advisable to consult a healthcare professional before beginning a new regimen to ensure safety and efficacy for individual needs.</p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://sale365day.com/get-viva-prime-me"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEh4ziNs4WZq4xzS_QPWE0_gLwWFZ9_gKIZGKePp1BMhs0Q5R9cqz1spEStYSS_T-jUL-cps28G85tNyeHbYDiIfXvw3c-qcpSd30aFYiJv7V_YQwRKSxP49K7Wja3nWyRxxazUN5OJtIod7xBGhFT5HVh9DzO2P_6TGOismmKbSJ514BCzLWPBW5-sb/w640-h418/dqswddd.JPG" alt="" width="640" height="418" border="0" data-original-height="597" data-original-width="912" /></a></div> <h2 style="text-align: left;"><strong>Dosage and Recommendations</strong></h2> <p>To achieve optimal results with Viva Prime Male Enhancement, adhering to the recommended dosage and usage guidelines is essential. The manufacturer typically provides clear instructions on the product label or packaging.</p> <p>For most individuals, the standard dosage involves taking a specific number of capsules daily, usually with a meal. 
It is crucial to follow this regimen consistently to allow the body to absorb and benefit from the supplement's ingredients effectively.</p> <p>However, it is vital to avoid exceeding the recommended dosage, as doing so may lead to adverse effects or diminish the desired results. Consistency is key, and users are encouraged to incorporate Viva Prime Male Enhancement into their daily routine to experience its potential benefits over time.</p> <p>If you have any underlying health conditions or are taking other medications, it is essential to consult with a healthcare professional before starting the supplement. Their expert guidance will help determine the most appropriate dosage and usage plan based on your individual health needs.&nbsp;</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>Get A Special Deal And Bonuses When You Order Today!</strong></span></a></u></p> <h2 style="text-align: left;"><strong>Ingredients of Viva Prime Male Enhancement</strong></h2> <p><a href="https://vivaprimemepills.bandcamp.com/track/viva-prime-male-enhancement-new-report-buyer-beware-must-see-warning-before-buying">Viva Prime</a> prides itself on a thoughtfully curated blend of natural ingredients, each chosen to support its intended benefits. While the specific formulation may vary depending on the product version, some common key ingredients found in Viva Prime Male Enhancement include:</p> <p>1. L-Arginine: A vital amino acid that helps promote increased blood flow, supporting stamina and physical performance.</p> <p>2. Tribulus Terrestris: Known for its potential to enhance male virility and libido, this herb has been used in traditional medicine for centuries.</p> <p>3. Horny Goat Weed (Epimedium): Renowned for its aphrodisiac properties, this herb may contribute to improved sexual function and desire.</p> <p>4. 
Fenugreek: With a history in traditional medicine, fenugreek is believed to support testosterone levels and vitality.</p> <p>5. Tongkat Ali: Also known as Longjack, this herb may aid in enhancing stamina and endurance.</p> <p>6. Maca Root: Rich in nutrients, maca root is believed to contribute to overall energy levels and vitality.</p> <p>Viva Prime's ingredients are often chosen for their natural properties, which align with its focus on providing a holistic approach to enhancing stamina, virility, and overall well-being. However, it is essential to consider individual sensitivities and consult with a healthcare professional before starting any new supplement regimen.</p> <h2 style="text-align: left;"><strong>Conclusion</strong></h2> <p>In conclusion, <a href="https://viva-prime-male-enhancement-2023.webflow.io/">Viva Prime Male Enhancement</a> has emerged as a male booster supplement with claims of natural stamina enhancement and virility support. While user experiences and reviews have been varied, it is evident that the product's unique formulation has resonated positively with some individuals.</p> <p>The benefits of <a href="https://www.facebook.com/profile.php?id=61550994325931">Viva Prime Male Enhancement</a>, such as increased stamina, improved virility, and potential mental health advantages, have piqued the interest of many seeking to elevate their overall well-being. However, it is essential to approach the product with a discerning eye, considering individual factors and health requirements.</p> <p>As with any supplement, it is advisable to consult a healthcare professional before incorporating <a href="https://vivaprimemaleenhancement0.godaddysites.com/">Viva Prime Male Enhancement</a> into your daily routine. 
Their expertise can help determine whether this male booster aligns with your specific health goals and potential concerns.&nbsp;</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>Click Here to Get Viva Prime Male Enhancement Supplement At Discounted Price!!!</strong></span></a></u></p> <p>Ultimately, the decision to try <a href="https://devfolio.co/@vivaprimeme">Viva Prime Male Enhancement</a> rests with the user, and the best approach involves balanced research, cautious consideration, and a focus on personal well-being.</p> <div class="separator" style="clear: both; text-align: center;"><a style="margin-left: 1em; margin-right: 1em;" href="https://sale365day.com/get-viva-prime-me"><img src="https://blogger.googleusercontent.com/img/b/R29vZ2xl/AVvXsEiYfr6Nj1JIyXjnTb4qMwBQ5JqH_dXx1c6PurNgTHN69920ZGNgmfzKe1i8xFWogeTZP0dJX2O0sJ5EBaNBG1HL_G_TJrRTtu0hjrex1l8s59tXAbBWNW-AAAszpNXVHpIxLW3vRYsJ-GGDTjGC77I2lcGLhroB9NrnFBF5ScXJknxYc2kJVDjGJB4E/w640-h374/qdqsddqds.JPG" alt="" width="640" height="374" border="0" data-original-height="494" data-original-width="845" /></a></div> <h2 style="text-align: left;"><strong>FAQ</strong></h2> <p><strong>1. Are there any drawbacks to using Viva Prime Male Enhancement?</strong></p> <p>While <a href="https://lookerstudio.google.com/reporting/9b5ce35a-8ed6-4530-841b-fa9e6ba64d88/page/wyWbD">Viva Prime Male Enhancement</a> is formulated with natural ingredients, some users may experience mild side effects, such as digestive discomfort. It is essential to be mindful of individual sensitivities and follow the recommended dosage to minimize potential drawbacks.</p> <p><strong>2. What are the side effects and precautions of Viva Prime?</strong></p> <p>Commonly reported side effects include temporary digestive issues. 
To ensure safety, adhere to the recommended dosage and consult a healthcare professional, especially if you have underlying health conditions or are taking medications.</p> <p><strong>3. How does Viva Prime Male Enhancement compare to other similar products?</strong></p> <p>Viva Prime Male Enhancement differentiates itself with its emphasis on natural ingredients and comprehensive approach to well-being. Users should assess individual needs and preferences when comparing it to other male booster supplements in the market.</p> <p><strong>4. What is the recommended dosage for Viva Prime Male Enhancement?</strong></p> <p>The standard dosage typically involves taking a specific number of capsules daily with a meal. Adherence to the recommended dosage is crucial to experience potential benefits effectively.</p> <p><strong>5. Can Viva Prime Male Enhancement be used by everyone?</strong></p> <p>While <a href="https://sketchfab.com/3d-models/viva-prime-male-enhancement-alert-2023-working-fb9971457d1641048fe54ed5a99feaf3">Viva Prime Male Enhancement</a> is generally safe for many individuals, pregnant or nursing individuals should avoid using it without medical approval. Consulting with a healthcare professional is advised for personalized recommendations.</p> <p><strong>6. How long does it take to see results with Viva Prime Male Enhancement</strong></p> <p>Individual experiences may vary, and results are influenced by factors such as lifestyle and body chemistry. Consistent use and patience are essential to determine the supplement's effectiveness.&nbsp;</p> <p><u><a href="https://sale365day.com/get-viva-prime-me" target="_blank" rel="nofollow noopener"><span style="font-size: large;"><strong>Click Here to Get Viva Prime Male Enhancement Supplement At Discounted Price!!!</strong></span></a></u></p> <p><strong>7. Is Viva Prime Male Enhancement worth trying?</strong></p> <p>The decision to try Viva Primeshould be based on individual health needs and preferences. 
Consulting with a healthcare professional can provide personalized insights to determine if the product aligns with your goals.</p> <p><a href="https://groups.google.com/g/viva-prime-male-enhancement-offer/c/rVqFuLyG6W0">Viva Prime Male Enhancement</a> offers an array of compelling benefits that can profoundly impact your overall well-being. Its primary advantage lies in its ability to naturally enhance stamina, empowering you to conquer physical challenges with renewed vigor. Whether you're an athlete, fitness enthusiast, or someone striving for peak performance, Viva Prime can be an invaluable aid in achieving your goals.&nbsp;</p> <p><strong>Read More:</strong></p> <p><a href="https://yourpillsboss.blogspot.com/2023/09/viva-prime-male-enhancement-2023.html">https://yourpillsboss.blogspot.com/2023/09/viva-prime-male-enhancement-2023.html</a><br /><a href="https://viva-prime-male-enhancement-official.jimdosite.com/">https://viva-prime-male-enhancement-official.jimdosite.com/</a><br /><a href="https://groups.google.com/g/viva-prime-male-enhancement-offer">https://groups.google.com/g/viva-prime-male-enhancement-offer</a><br /><a href="https://groups.google.com/g/viva-prime-male-enhancement-offer/c/rVqFuLyG6W0">https://groups.google.com/g/viva-prime-male-enhancement-offer/c/rVqFuLyG6W0</a><br /><a href="https://groups.google.com/g/viva-prime-male-enhancement-offer/c/zqv6GaJ7SSM">https://groups.google.com/g/viva-prime-male-enhancement-offer/c/zqv6GaJ7SSM</a><br /><a href="https://lookerstudio.google.com/reporting/9b5ce35a-8ed6-4530-841b-fa9e6ba64d88/page/wyWbD">https://lookerstudio.google.com/reporting/9b5ce35a-8ed6-4530-841b-fa9e6ba64d88/page/wyWbD</a><br /><a href="https://devfolio.co/@vivaprimeme">https://devfolio.co/@vivaprimeme</a><br /><a href="https://www.facebook.com/profile.php?id=61550994325931">https://www.facebook.com/profile.php?id=61550994325931</a><br /><a 
href="https://viva-prime-male-enhancement-2023.webflow.io/">https://viva-prime-male-enhancement-2023.webflow.io/</a><br /><a href="https://viva-prime-me.clubeo.com/page/viva-prime-male-enhancement-update-2023-trustworthy-male-health-support-or-fake-supplement.html">https://viva-prime-me.clubeo.com/page/viva-prime-male-enhancement-update-2023-trustworthy-male-health-support-or-fake-supplement.html</a><br /><a href="https://infogram.com/viva-prime-male-enhancement-2023-update-can-it-actually-help-regulate-blood-flow-or-is-it-just-hype-1hdw2jpo5d8xp2l">https://infogram.com/viva-prime-male-enhancement-2023-update-can-it-actually-help-regulate-blood-flow-or-is-it-just-hype-1hdw2jpo5d8xp2l</a><br /><a href="https://vivaprimemepills.bandcamp.com/track/viva-prime-male-enhancement-new-report-buyer-beware-must-see-warning-before-buying">https://vivaprimemepills.bandcamp.com/track/viva-prime-male-enhancement-new-report-buyer-beware-must-see-warning-before-buying</a><br /><a href="https://vivaprimemaleenhancement0.godaddysites.com/">https://vivaprimemaleenhancement0.godaddysites.com/</a><br /><a href="https://sketchfab.com/3d-models/viva-prime-male-enhancement-alert-2023-working-fb9971457d1641048fe54ed5a99feaf3">https://sketchfab.com/3d-models/viva-prime-male-enhancement-alert-2023-working-fb9971457d1641048fe54ed5a99feaf3</a><br /><a href="https://vivaprimemescam.contently.com/">https://vivaprimemescam.contently.com/</a><br /><a href="https://medium.com/@vivaprime_46447/viva-prime-male-enhancement-scam-or-legit-most-powerful-male-formula-5627ffb8cfc6">https://medium.com/@vivaprime_46447/viva-prime-male-enhancement-scam-or-legit-most-powerful-male-formula-5627ffb8cfc6</a><br /><a href="https://medium.com/@vivaprime_46447">https://medium.com/@vivaprime_46447</a><br /><a 
href="https://soundcloud.com/viva-prime-male-enhancement-669793480/viva-prime-male-enhancement-alarming-disturbing-side-effects-or-negative-complaints">https://soundcloud.com/viva-prime-male-enhancement-669793480/viva-prime-male-enhancement-alarming-disturbing-side-effects-or-negative-complaints</a><br /><a href="https://www.forexagone.com/forum/experiences-trading/viva-prime-male-enhancement-formulated-with-100-pure-ingredients-that-maintain-sexual-health-68126#165343">https://www.forexagone.com/forum/experiences-trading/viva-prime-male-enhancement-formulated-with-100-pure-ingredients-that-maintain-sexual-health-68126#165343</a><br /><a href="https://community.weddingwire.in/forum/viva-prime-male-enhancement-fda-approved-2023-unexpected-details-revealed--t146362">https://community.weddingwire.in/forum/viva-prime-male-enhancement-fda-approved-2023-unexpected-details-revealed--t146362</a><br /><a href="https://viva-prime-male-enhancement.hashnode.dev/viva-prime-male-enhancement-new-report-2023-do-not-buy-till-you-read-my-real-experience">https://viva-prime-male-enhancement.hashnode.dev/viva-prime-male-enhancement-new-report-2023-do-not-buy-till-you-read-my-real-experience</a><br /><a href="https://hashnode.com/@vivaprimemaleenhace">https://hashnode.com/@vivaprimemaleenhace</a><br /><a href="https://devfolio.co/@vivaprimemepill">https://devfolio.co/@vivaprimemepill</a><br /><a href="https://sketchfab.com/3d-models/viva-prime-male-enhancement-reviews-update-use-48775974c7924d6c83bc62c4645384b1">https://sketchfab.com/3d-models/viva-prime-male-enhancement-reviews-update-use-48775974c7924d6c83bc62c4645384b1</a><br /><a href="https://viva-prime-updates.clubeo.com/page/viva-prime-male-enhancement-helps-to-increase-the-duration-of-erection.html">https://viva-prime-updates.clubeo.com/page/viva-prime-male-enhancement-helps-to-increase-the-duration-of-erection.html</a><br /><a 
href="https://vivaprimemaleenhancementusa.bandcamp.com/track/viva-prime-male-enhancement-reviews-2023-real-side-effects-risk-or-ingredients-that-work">https://vivaprimemaleenhancementusa.bandcamp.com/track/viva-prime-male-enhancement-reviews-2023-real-side-effects-risk-or-ingredients-that-work</a><br /><a href="https://vivaprimemaleenhancementusa.contently.com/">https://vivaprimemaleenhancementusa.contently.com/</a></p>
mozart-ai/test-xawery-qa
2023-09-02T13:34:15.000Z
[ "region:us" ]
mozart-ai
null
null
null
0
0
--- dataset_info: features: - name: url dtype: string - name: answer dtype: string - name: question dtype: string splits: - name: train num_bytes: 6088 num_examples: 35 download_size: 4384 dataset_size: 6088 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "test-xawery-qa" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
rajaswa-postman/es_chat
2023-09-02T14:15:24.000Z
[ "region:us" ]
rajaswa-postman
null
null
null
0
0
Entry not found
haurajahra/SQUAD_id
2023-09-02T13:55:02.000Z
[ "license:other", "region:us" ]
haurajahra
null
null
null
0
0
--- license: other ---
polymath707/vietnamese-handmade
2023-09-02T13:58:31.000Z
[ "license:apache-2.0", "region:us" ]
polymath707
null
null
null
0
0
--- license: apache-2.0 ---
pipi00pipi/smotrich_he
2023-09-02T13:58:46.000Z
[ "license:openrail", "region:us" ]
pipi00pipi
null
null
null
0
0
--- license: openrail ---
open-llm-leaderboard/details_facebook__galactica-1.3b
2023-09-02T13:59:23.000Z
[ "region:us" ]
open-llm-leaderboard
null
null
null
0
0
--- pretty_name: Evaluation run of facebook/galactica-1.3b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [facebook/galactica-1.3b](https://huggingface.co/facebook/galactica-1.3b) on the\ \ [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the agregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_facebook__galactica-1.3b\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-09-02T13:58:08.758268](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__galactica-1.3b/blob/main/results_2023-09-02T13%3A58%3A08.758268.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2724936919306173,\n\ \ \"acc_stderr\": 0.03226048262400512,\n \"acc_norm\": 0.2744720231892299,\n\ \ \"acc_norm_stderr\": 0.03227238640653428,\n \"mc1\": 0.2484700122399021,\n\ \ \"mc1_stderr\": 0.015127427096520667,\n \"mc2\": 0.41399712836660274,\n\ \ \"mc2_stderr\": 0.01494063292915903\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.2960750853242321,\n \"acc_stderr\": 0.013340916085246261,\n\ \ \"acc_norm\": 0.3412969283276451,\n \"acc_norm_stderr\": 0.01385583128749772\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3375821549492133,\n\ \ \"acc_stderr\": 0.004719187890948067,\n \"acc_norm\": 0.40908185620394344,\n\ \ \"acc_norm_stderr\": 0.004906595857916765\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34074074074074073,\n\ \ \"acc_stderr\": 0.04094376269996793,\n \"acc_norm\": 0.34074074074074073,\n\ \ \"acc_norm_stderr\": 0.04094376269996793\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.03459777606810536,\n\ \ \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.03459777606810536\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.2,\n\ \ \"acc_stderr\": 0.040201512610368445,\n \"acc_norm\": 0.2,\n \ \ \"acc_norm_stderr\": 0.040201512610368445\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.027008766090708094,\n\ \ \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.027008766090708094\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n\ \ \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n 
},\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n\ \ \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n\ \ \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.20588235294117646,\n \"acc_stderr\": 0.04023382273617749,\n\ \ \"acc_norm\": 0.20588235294117646,\n \"acc_norm_stderr\": 0.04023382273617749\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n\ \ \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.3148936170212766,\n \"acc_stderr\": 0.030363582197238167,\n\ \ \"acc_norm\": 0.3148936170212766,\n \"acc_norm_stderr\": 0.030363582197238167\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n\ \ \"acc_stderr\": 0.04142439719489362,\n \"acc_norm\": 0.2631578947368421,\n\ \ \"acc_norm_stderr\": 0.04142439719489362\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n\ \ \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.26455026455026454,\n \"acc_stderr\": 0.022717467897708624,\n \"\ acc_norm\": 0.26455026455026454,\n 
\"acc_norm_stderr\": 0.022717467897708624\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.15873015873015872,\n\ \ \"acc_stderr\": 0.03268454013011743,\n \"acc_norm\": 0.15873015873015872,\n\ \ \"acc_norm_stderr\": 0.03268454013011743\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.27741935483870966,\n\ \ \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.27741935483870966,\n\ \ \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.29064039408866993,\n \"acc_stderr\": 0.0319474007226554,\n\ \ \"acc_norm\": 0.29064039408866993,\n \"acc_norm_stderr\": 0.0319474007226554\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\"\ : 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.30303030303030304,\n \"acc_stderr\": 0.03588624800091708,\n\ \ \"acc_norm\": 0.30303030303030304,\n \"acc_norm_stderr\": 0.03588624800091708\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.2727272727272727,\n \"acc_stderr\": 0.03173071239071724,\n \"\ acc_norm\": 0.2727272727272727,\n \"acc_norm_stderr\": 0.03173071239071724\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.29015544041450775,\n \"acc_stderr\": 0.032752644677915166,\n\ \ \"acc_norm\": 0.29015544041450775,\n \"acc_norm_stderr\": 0.032752644677915166\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.2846153846153846,\n \"acc_stderr\": 0.022878322799706283,\n\ \ \"acc_norm\": 0.2846153846153846,\n \"acc_norm_stderr\": 0.022878322799706283\n\ \ },\n 
\"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.27037037037037037,\n \"acc_stderr\": 0.027080372815145668,\n \ \ \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.027080372815145668\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.24789915966386555,\n \"acc_stderr\": 0.028047967224176896,\n\ \ \"acc_norm\": 0.24789915966386555,\n \"acc_norm_stderr\": 0.028047967224176896\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658753,\n \"\ acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658753\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.25688073394495414,\n \"acc_stderr\": 0.018732492928342465,\n \"\ acc_norm\": 0.25688073394495414,\n \"acc_norm_stderr\": 0.018732492928342465\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.2638888888888889,\n \"acc_stderr\": 0.03005820270430985,\n \"\ acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03005820270430985\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.24019607843137256,\n \"acc_stderr\": 0.02998373305591361,\n \"\ acc_norm\": 0.24019607843137256,\n \"acc_norm_stderr\": 0.02998373305591361\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.28270042194092826,\n \"acc_stderr\": 0.029312814153955924,\n \ \ \"acc_norm\": 0.28270042194092826,\n \"acc_norm_stderr\": 0.029312814153955924\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.27802690582959644,\n\ \ \"acc_stderr\": 0.03006958487449405,\n \"acc_norm\": 0.27802690582959644,\n\ \ \"acc_norm_stderr\": 0.03006958487449405\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.0364129708131373,\n\ \ \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.0364129708131373\n\ \ },\n 
\"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.3884297520661157,\n \"acc_stderr\": 0.04449270350068382,\n \"\ acc_norm\": 0.3884297520661157,\n \"acc_norm_stderr\": 0.04449270350068382\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946315,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946315\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.3067484662576687,\n \"acc_stderr\": 0.036230899157241474,\n\ \ \"acc_norm\": 0.3067484662576687,\n \"acc_norm_stderr\": 0.036230899157241474\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n\ \ \"acc_stderr\": 0.043642261558410445,\n \"acc_norm\": 0.30357142857142855,\n\ \ \"acc_norm_stderr\": 0.043642261558410445\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.20388349514563106,\n \"acc_stderr\": 0.03989139859531771,\n\ \ \"acc_norm\": 0.20388349514563106,\n \"acc_norm_stderr\": 0.03989139859531771\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2777777777777778,\n\ \ \"acc_stderr\": 0.02934311479809445,\n \"acc_norm\": 0.2777777777777778,\n\ \ \"acc_norm_stderr\": 0.02934311479809445\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n\ \ \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2835249042145594,\n\ \ \"acc_stderr\": 0.01611731816683229,\n \"acc_norm\": 0.2835249042145594,\n\ \ \"acc_norm_stderr\": 0.01611731816683229\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.28034682080924855,\n \"acc_stderr\": 0.02418242749657761,\n\ \ \"acc_norm\": 0.28034682080924855,\n \"acc_norm_stderr\": 0.02418242749657761\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24581005586592178,\n\ \ \"acc_stderr\": 0.01440029642922559,\n 
\"acc_norm\": 0.24581005586592178,\n\ \ \"acc_norm_stderr\": 0.01440029642922559\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.27124183006535946,\n \"acc_stderr\": 0.02545775669666788,\n\ \ \"acc_norm\": 0.27124183006535946,\n \"acc_norm_stderr\": 0.02545775669666788\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3215434083601286,\n\ \ \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.3215434083601286,\n\ \ \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.3055555555555556,\n \"acc_stderr\": 0.025630824975621358,\n\ \ \"acc_norm\": 0.3055555555555556,\n \"acc_norm_stderr\": 0.025630824975621358\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.26595744680851063,\n \"acc_stderr\": 0.026358065698880592,\n \ \ \"acc_norm\": 0.26595744680851063,\n \"acc_norm_stderr\": 0.026358065698880592\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2685788787483703,\n\ \ \"acc_stderr\": 0.011320056629121734,\n \"acc_norm\": 0.2685788787483703,\n\ \ \"acc_norm_stderr\": 0.011320056629121734\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.2610294117647059,\n \"acc_stderr\": 0.026679252270103114,\n\ \ \"acc_norm\": 0.2610294117647059,\n \"acc_norm_stderr\": 0.026679252270103114\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.3022875816993464,\n \"acc_stderr\": 0.018579232711113877,\n \ \ \"acc_norm\": 0.3022875816993464,\n \"acc_norm_stderr\": 0.018579232711113877\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n\ \ \"acc_stderr\": 0.040693063197213775,\n \"acc_norm\": 0.23636363636363636,\n\ \ \"acc_norm_stderr\": 0.040693063197213775\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.2653061224489796,\n \"acc_stderr\": 0.028263889943784606,\n\ \ \"acc_norm\": 0.2653061224489796,\n \"acc_norm_stderr\": 
0.028263889943784606\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n\ \ \"acc_stderr\": 0.030360490154014673,\n \"acc_norm\": 0.24378109452736318,\n\ \ \"acc_norm_stderr\": 0.030360490154014673\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.29518072289156627,\n\ \ \"acc_stderr\": 0.03550920185689631,\n \"acc_norm\": 0.29518072289156627,\n\ \ \"acc_norm_stderr\": 0.03550920185689631\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.26900584795321636,\n \"acc_stderr\": 0.0340105262010409,\n\ \ \"acc_norm\": 0.26900584795321636,\n \"acc_norm_stderr\": 0.0340105262010409\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2484700122399021,\n\ \ \"mc1_stderr\": 0.015127427096520667,\n \"mc2\": 0.41399712836660274,\n\ \ \"mc2_stderr\": 0.01494063292915903\n }\n}\n```" repo_url: https://huggingface.co/facebook/galactica-1.3b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|arc:challenge|25_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hellaswag|10_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T13:58:08.758268.parquet' - 
'**/details_harness|hendrycksTest-anatomy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T13:58:08.758268.parquet' - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T13:58:08.758268.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T13:58:08.758268.parquet' 
- '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T13:58:08.758268.parquet' - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-management|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T13:58:08.758268.parquet' - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-09-02T13:58:08.758268.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - 
'**/details_harness|hendrycksTest-college_physics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T13:58:08.758268.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_geography|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - 
'**/details_harness|hendrycksTest-international_law|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 
2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - 
'**/details_harness|hendrycksTest-sociology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-09-02T13:58:08.758268.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_09_02T13_58_08.758268 path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T13:58:08.758268.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-09-02T13:58:08.758268.parquet' - config_name: results data_files: - split: 2023_09_02T13_58_08.758268 path: - results_2023-09-02T13:58:08.758268.parquet - split: latest path: - results_2023-09-02T13:58:08.758268.parquet --- # Dataset Card for Evaluation run of facebook/galactica-1.3b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/facebook/galactica-1.3b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model 
[facebook/galactica-1.3b](https://huggingface.co/facebook/galactica-1.3b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_facebook__galactica-1.3b", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-09-02T13:58:08.758268](https://huggingface.co/datasets/open-llm-leaderboard/details_facebook__galactica-1.3b/blob/main/results_2023-09-02T13%3A58%3A08.758268.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. 
You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2724936919306173, "acc_stderr": 0.03226048262400512, "acc_norm": 0.2744720231892299, "acc_norm_stderr": 0.03227238640653428, "mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520667, "mc2": 0.41399712836660274, "mc2_stderr": 0.01494063292915903 }, "harness|arc:challenge|25": { "acc": 0.2960750853242321, "acc_stderr": 0.013340916085246261, "acc_norm": 0.3412969283276451, "acc_norm_stderr": 0.01385583128749772 }, "harness|hellaswag|10": { "acc": 0.3375821549492133, "acc_stderr": 0.004719187890948067, "acc_norm": 0.40908185620394344, "acc_norm_stderr": 0.004906595857916765 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.04094376269996793, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996793 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.23684210526315788, "acc_stderr": 0.03459777606810536, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.03459777606810536 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.2, "acc_stderr": 0.040201512610368445, "acc_norm": 0.2, "acc_norm_stderr": 0.040201512610368445 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.26037735849056604, "acc_stderr": 0.027008766090708094, "acc_norm": 0.26037735849056604, "acc_norm_stderr": 0.027008766090708094 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, 
"acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.20588235294117646, "acc_stderr": 0.04023382273617749, "acc_norm": 0.20588235294117646, "acc_norm_stderr": 0.04023382273617749 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3148936170212766, "acc_stderr": 0.030363582197238167, "acc_norm": 0.3148936170212766, "acc_norm_stderr": 0.030363582197238167 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.04142439719489362, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.04142439719489362 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.26455026455026454, "acc_stderr": 0.022717467897708624, "acc_norm": 0.26455026455026454, "acc_norm_stderr": 0.022717467897708624 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.15873015873015872, "acc_stderr": 0.03268454013011743, "acc_norm": 0.15873015873015872, "acc_norm_stderr": 0.03268454013011743 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.27741935483870966, "acc_stderr": 0.025470196835900055, "acc_norm": 0.27741935483870966, "acc_norm_stderr": 0.025470196835900055 }, 
"harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.29064039408866993, "acc_stderr": 0.0319474007226554, "acc_norm": 0.29064039408866993, "acc_norm_stderr": 0.0319474007226554 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.30303030303030304, "acc_stderr": 0.03588624800091708, "acc_norm": 0.30303030303030304, "acc_norm_stderr": 0.03588624800091708 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2727272727272727, "acc_stderr": 0.03173071239071724, "acc_norm": 0.2727272727272727, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.29015544041450775, "acc_stderr": 0.032752644677915166, "acc_norm": 0.29015544041450775, "acc_norm_stderr": 0.032752644677915166 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2846153846153846, "acc_stderr": 0.022878322799706283, "acc_norm": 0.2846153846153846, "acc_norm_stderr": 0.022878322799706283 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145668, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145668 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24789915966386555, "acc_stderr": 0.028047967224176896, "acc_norm": 0.24789915966386555, "acc_norm_stderr": 0.028047967224176896 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658753, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658753 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.25688073394495414, "acc_stderr": 0.018732492928342465, "acc_norm": 0.25688073394495414, "acc_norm_stderr": 0.018732492928342465 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2638888888888889, "acc_stderr": 
0.03005820270430985, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03005820270430985 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.24019607843137256, "acc_stderr": 0.02998373305591361, "acc_norm": 0.24019607843137256, "acc_norm_stderr": 0.02998373305591361 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.28270042194092826, "acc_stderr": 0.029312814153955924, "acc_norm": 0.28270042194092826, "acc_norm_stderr": 0.029312814153955924 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.27802690582959644, "acc_stderr": 0.03006958487449405, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.03006958487449405 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.0364129708131373, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.0364129708131373 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946315, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946315 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.043642261558410445, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.043642261558410445 }, "harness|hendrycksTest-management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.03989139859531771, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.03989139859531771 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02934311479809445, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02934311479809445 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, 
"acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2835249042145594, "acc_stderr": 0.01611731816683229, "acc_norm": 0.2835249042145594, "acc_norm_stderr": 0.01611731816683229 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.28034682080924855, "acc_stderr": 0.02418242749657761, "acc_norm": 0.28034682080924855, "acc_norm_stderr": 0.02418242749657761 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24581005586592178, "acc_stderr": 0.01440029642922559, "acc_norm": 0.24581005586592178, "acc_norm_stderr": 0.01440029642922559 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.27124183006535946, "acc_stderr": 0.02545775669666788, "acc_norm": 0.27124183006535946, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3215434083601286, "acc_stderr": 0.026527724079528872, "acc_norm": 0.3215434083601286, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.3055555555555556, "acc_stderr": 0.025630824975621358, "acc_norm": 0.3055555555555556, "acc_norm_stderr": 0.025630824975621358 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880592, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880592 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2685788787483703, "acc_stderr": 0.011320056629121734, "acc_norm": 0.2685788787483703, "acc_norm_stderr": 0.011320056629121734 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2610294117647059, "acc_stderr": 0.026679252270103114, "acc_norm": 0.2610294117647059, "acc_norm_stderr": 0.026679252270103114 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3022875816993464, "acc_stderr": 0.018579232711113877, "acc_norm": 0.3022875816993464, "acc_norm_stderr": 0.018579232711113877 }, "harness|hendrycksTest-public_relations|5": { "acc": 
0.23636363636363636, "acc_stderr": 0.040693063197213775, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.040693063197213775 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2653061224489796, "acc_stderr": 0.028263889943784606, "acc_norm": 0.2653061224489796, "acc_norm_stderr": 0.028263889943784606 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014673, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014673 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.29518072289156627, "acc_stderr": 0.03550920185689631, "acc_norm": 0.29518072289156627, "acc_norm_stderr": 0.03550920185689631 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.0340105262010409, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.0340105262010409 }, "harness|truthfulqa:mc|0": { "mc1": 0.2484700122399021, "mc1_stderr": 0.015127427096520667, "mc2": 0.41399712836660274, "mc2_stderr": 0.01494063292915903 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? 
[More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
navradio/hackday
2023-09-02T14:12:49.000Z
[ "license:apache-2.0", "region:us" ]
navradio
null
null
null
0
0
--- license: apache-2.0 ---
navradio/hack
2023-09-02T14:25:36.000Z
[ "region:us" ]
navradio
null
null
null
0
0
Entry not found
nielsr/test-data-nougat
2023-09-02T15:05:03.000Z
[ "region:us" ]
nielsr
null
null
null
0
0
Entry not found
aliencen/frpc
2023-09-02T15:17:51.000Z
[ "license:openrail", "region:us" ]
aliencen
null
null
null
0
0
--- license: openrail ---
siddharthbulia/guanaco-llama2
2023-09-02T15:17:04.000Z
[ "region:us" ]
siddharthbulia
null
null
null
0
0
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 15401731 num_examples: 9846 download_size: 8983165 dataset_size: 15401731 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "guanaco-llama2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)