id
stringlengths
2
115
author
stringlengths
2
42
last_modified
timestamp[us, tz=UTC]
downloads
int64
0
8.87M
likes
int64
0
3.84k
paperswithcode_id
stringlengths
2
45
tags
list
lastModified
timestamp[us, tz=UTC]
createdAt
stringlengths
24
24
key
stringclasses
1 value
created
timestamp[us]
card
stringlengths
1
1.01M
embedding
list
library_name
stringclasses
21 values
pipeline_tag
stringclasses
27 values
mask_token
null
card_data
null
widget_data
null
model_index
null
config
null
transformers_info
null
spaces
null
safetensors
null
transformersInfo
null
modelId
stringlengths
5
111
embeddings
list
arieg/bw_spec_cls_4_13_s_200
arieg
2023-11-12T09:51:13Z
0
0
null
[ "region:us" ]
2023-11-12T09:51:13Z
2023-11-12T09:51:09.000Z
2023-11-12T09:51:09
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1102' '1': '1193' '2': '1195' '3': '1196' splits: - name: train num_bytes: 42910474.0 num_examples: 800 - name: test num_bytes: 1075378.0 num_examples: 20 download_size: 37970465 dataset_size: 43985852.0 --- # Dataset Card for "bw_spec_cls_4_13_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7009830474853516, -0.05677355080842972, 0.32410675287246704, 0.5328739881515503, -0.21018086373806, -0.08473947644233704, 0.004045888315886259, -0.11393887549638748, 0.6002256274223328, 0.45773443579673767, -0.9197499752044678, -0.7406975626945496, -0.5272969007492065, 0.085614643990993...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_14_s_200
arieg
2023-11-12T09:58:08Z
0
0
null
[ "region:us" ]
2023-11-12T09:58:08Z
2023-11-12T09:58:04.000Z
2023-11-12T09:58:04
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1197' '1': '1270' '2': '1276' '3': '1277' splits: - name: train num_bytes: 43731623.0 num_examples: 800 - name: test num_bytes: 1102972.0 num_examples: 20 download_size: 37991761 dataset_size: 44834595.0 --- # Dataset Card for "bw_spec_cls_4_14_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7131273746490479, -0.019503889605402946, 0.3154606819152832, 0.5297505855560303, -0.2183266431093216, -0.12583886086940765, 0.01792769879102707, -0.1198868378996849, 0.5526516437530518, 0.5336467027664185, -0.8992763161659241, -0.7755262851715088, -0.5263316631317139, 0.1463942527770996...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_15_s_200
arieg
2023-11-12T10:05:00Z
0
0
null
[ "region:us" ]
2023-11-12T10:05:00Z
2023-11-12T10:04:56.000Z
2023-11-12T10:04:56
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1278' '1': '1417' '2': '1427' '3': '1443' splits: - name: train num_bytes: 42892698.0 num_examples: 800 - name: test num_bytes: 1071895.0 num_examples: 20 download_size: 38128383 dataset_size: 43964593.0 --- # Dataset Card for "bw_spec_cls_4_15_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7199586033821106, -0.07063253968954086, 0.3050138056278229, 0.5110980868339539, -0.18995465338230133, -0.057594869285821915, 0.004501620773226023, -0.16531504690647125, 0.5664734840393066, 0.4444481432437897, -0.8874610662460327, -0.7649683952331543, -0.46669748425483704, 0.072686031460...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_16_s_200
arieg
2023-11-12T10:11:44Z
0
0
null
[ "region:us" ]
2023-11-12T10:11:44Z
2023-11-12T10:11:41.000Z
2023-11-12T10:11:41
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1482' '1': '1510' '2': '1544' '3': '1642' splits: - name: train num_bytes: 43983230.0 num_examples: 800 - name: test num_bytes: 1108325.0 num_examples: 20 download_size: 38471730 dataset_size: 45091555.0 --- # Dataset Card for "bw_spec_cls_4_16_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6878641843795776, -0.04351484030485153, 0.31220781803131104, 0.43223506212234497, -0.22759273648262024, -0.07725200057029724, -0.010467126034200191, -0.161161407828331, 0.5649675130844116, 0.4623897969722748, -0.8644284009933472, -0.7577137351036072, -0.47610241174697876, 0.032092005014...
null
null
null
null
null
null
null
null
null
null
null
null
null
Zerenidel/Ymir_Fritz-LoRA
Zerenidel
2023-11-12T20:45:40Z
0
0
null
[ "region:us" ]
2023-11-12T20:45:40Z
2023-11-12T10:14:13.000Z
2023-11-12T10:14:13
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_17_s_200
arieg
2023-11-12T10:18:27Z
0
0
null
[ "region:us" ]
2023-11-12T10:18:27Z
2023-11-12T10:18:23.000Z
2023-11-12T10:18:23
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1644' '1': '1649' '2': '1661' '3': '1663' splits: - name: train num_bytes: 43937841.0 num_examples: 800 - name: test num_bytes: 1084667.0 num_examples: 20 download_size: 39034892 dataset_size: 45022508.0 --- # Dataset Card for "bw_spec_cls_4_17_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.669532060623169, -0.05652065947651863, 0.3088553845882416, 0.4319959580898285, -0.21349817514419556, -0.08533129096031189, -0.014123118482530117, -0.1453215479850769, 0.5417694449424744, 0.46991974115371704, -0.8565834760665894, -0.7470105290412903, -0.4932659864425659, 0.00801735557615...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_18_s_200
arieg
2023-11-12T10:25:19Z
0
0
null
[ "region:us" ]
2023-11-12T10:25:19Z
2023-11-12T10:25:15.000Z
2023-11-12T10:25:15
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1666' '1': '1673' '2': '1680' '3': '1681' splits: - name: train num_bytes: 46542294.0 num_examples: 800 - name: test num_bytes: 1182286.0 num_examples: 20 download_size: 41914749 dataset_size: 47724580.0 --- # Dataset Card for "bw_spec_cls_4_18_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6761380434036255, -0.0858093872666359, 0.30805814266204834, 0.4716796875, -0.1951882541179657, -0.0910273939371109, 0.006580126937478781, -0.1883048266172409, 0.5621539354324341, 0.5069039463996887, -0.8944674730300903, -0.70440673828125, -0.47233009338378906, 0.04514692723751068, -0....
null
null
null
null
null
null
null
null
null
null
null
null
null
hajili/azerbaijani_tweet_emotion_classification
hajili
2023-11-12T10:39:51Z
0
0
null
[ "task_categories:text-classification", "size_categories:100K<n<1M", "language:az", "license:mit", "region:us" ]
2023-11-12T10:39:51Z
2023-11-12T10:30:24.000Z
2023-11-12T10:30:24
--- license: mit task_categories: - text-classification language: - az size_categories: - 100K<n<1M --- This dataset contains 150K (train + test) cleaned tweets in Azerbaijani. Tweets were collected in 2021, and filtered and cleaned by following these steps: - Initial data were collected by using twint library. The tool is currently deprecated, cannot be used with new Twitter. - On top of the already filtered data, I applied an additional filter to select Azerbaijani tweets with using fastText language identification model. - Tweets were classified into 3 emotion categories: {positive: 1, negative: -1, neutral: 0} by using emojis as rule-based classifier. - Tags, usernames, and emojis were later cleaned. - Short tweets were filtered out.
[ -0.30368420481681824, -0.6262029409408569, 0.22855377197265625, 0.2799319326877594, -0.7011639475822449, 0.4269106090068817, -0.18941086530685425, -0.3540152609348297, 0.31736698746681213, 0.595912516117096, -0.8754683136940002, -0.9152820110321045, -0.7479113340377808, 0.18741603195667267...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_19_s_200
arieg
2023-11-12T10:32:07Z
0
0
null
[ "region:us" ]
2023-11-12T10:32:07Z
2023-11-12T10:32:02.000Z
2023-11-12T10:32:02
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1682' '1': '1683' '2': '1684' '3': '1685' splits: - name: train num_bytes: 50010946.0 num_examples: 800 - name: test num_bytes: 1255884.0 num_examples: 20 download_size: 43829751 dataset_size: 51266830.0 --- # Dataset Card for "bw_spec_cls_4_19_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6655508279800415, -0.08915546536445618, 0.30712780356407166, 0.5075404644012451, -0.2230152040719986, -0.09824226051568985, 0.013300967402756214, -0.15313011407852173, 0.5995368361473083, 0.49655210971832275, -0.855813205242157, -0.6821572780609131, -0.5093393921852112, 0.04117197915911...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_20_s_200
arieg
2023-11-12T10:39:07Z
0
0
null
[ "region:us" ]
2023-11-12T10:39:07Z
2023-11-12T10:39:03.000Z
2023-11-12T10:39:03
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1686' '1': '1687' '2': '1688' '3': '1689' splits: - name: train num_bytes: 49278489.0 num_examples: 800 - name: test num_bytes: 1230085.0 num_examples: 20 download_size: 42684649 dataset_size: 50508574.0 --- # Dataset Card for "bw_spec_cls_4_20_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7273234128952026, -0.02429906278848648, 0.31354767084121704, 0.5371763706207275, -0.19626900553703308, -0.0452108234167099, 0.022172199562191963, -0.16310207545757294, 0.5583972930908203, 0.5154549479484558, -0.8418934345245361, -0.7391461133956909, -0.5037573575973511, 0.03854833170771...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_21_s_200
arieg
2023-11-12T10:46:05Z
0
0
null
[ "region:us" ]
2023-11-12T10:46:05Z
2023-11-12T10:46:02.000Z
2023-11-12T10:46:02
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1701' '1': '1702' '2': '1703' '3': '1704' splits: - name: train num_bytes: 47438520.0 num_examples: 800 - name: test num_bytes: 1189696.0 num_examples: 20 download_size: 41706565 dataset_size: 48628216.0 --- # Dataset Card for "bw_spec_cls_4_21_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7149372100830078, -0.07974869012832642, 0.2994888126850128, 0.5280974507331848, -0.19787919521331787, -0.032222893089056015, 0.04212867468595505, -0.17726509273052216, 0.5718696117401123, 0.5612350106239319, -0.8795140981674194, -0.7157652378082275, -0.48677369952201843, 0.0125063927844...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_22_s_200
arieg
2023-11-12T10:53:02Z
0
0
null
[ "region:us" ]
2023-11-12T10:53:02Z
2023-11-12T10:52:58.000Z
2023-11-12T10:52:58
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1706' '1': '1720' '2': '1732' '3': '1733' splits: - name: train num_bytes: 43566639.0 num_examples: 800 - name: test num_bytes: 1095432.0 num_examples: 20 download_size: 38693515 dataset_size: 44662071.0 --- # Dataset Card for "bw_spec_cls_4_22_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6448429822921753, -0.07608839869499207, 0.28817757964134216, 0.49422338604927063, -0.19260086119174957, -0.039740461856126785, 0.040765609592199326, -0.1302880495786667, 0.5622949600219727, 0.5338233709335327, -0.8552075624465942, -0.7334362864494324, -0.5329123735427856, -0.00405852124...
null
null
null
null
null
null
null
null
null
null
null
null
null
Basit34/lamma_testab
Basit34
2023-11-12T10:54:26Z
0
0
null
[ "region:us" ]
2023-11-12T10:54:26Z
2023-11-12T10:54:26.000Z
2023-11-12T10:54:26
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
danielz01/pattern-net
danielz01
2023-11-12T11:12:30Z
0
0
null
[ "region:us" ]
2023-11-12T11:12:30Z
2023-11-12T10:59:31.000Z
2023-11-12T10:59:31
--- dataset_info: features: - name: image dtype: image - name: label dtype: string - name: path dtype: string splits: - name: train num_bytes: 822501873.6 num_examples: 30400 download_size: 1422604377 dataset_size: 822501873.6 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "pattern-net" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5361024141311646, -0.2633945047855377, 0.005827180575579405, 0.412420392036438, -0.2040308564901352, -0.0903700739145279, 0.4730702340602875, -0.22846202552318573, 1.0412598848342896, 0.537880539894104, -0.7820331454277039, -0.7284265160560608, -0.6029508709907532, -0.23062850534915924,...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_23_s_200
arieg
2023-11-12T11:00:04Z
0
0
null
[ "region:us" ]
2023-11-12T11:00:04Z
2023-11-12T10:59:55.000Z
2023-11-12T10:59:55
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1735' '1': '1736' '2': '1883' '3': '1891' splits: - name: train num_bytes: 43184098.0 num_examples: 800 - name: test num_bytes: 1095480.0 num_examples: 20 download_size: 37432845 dataset_size: 44279578.0 --- # Dataset Card for "bw_spec_cls_4_23_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6909692287445068, -0.01714926026761532, 0.3029547929763794, 0.525115966796875, -0.2046061009168625, 0.015315896831452847, 0.00468512112274766, -0.15499991178512573, 0.5735846757888794, 0.547326922416687, -0.8971527814865112, -0.7420588731765747, -0.48295509815216064, 0.00652380613610148...
null
null
null
null
null
null
null
null
null
null
null
null
null
RASMUS/TTS_datasets
RASMUS
2023-11-17T13:45:39Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-17T13:45:39Z
2023-11-12T11:00:49.000Z
2023-11-12T11:00:49
--- license: apache-2.0 ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_4_24_s_200
arieg
2023-11-12T11:07:04Z
0
0
null
[ "region:us" ]
2023-11-12T11:07:04Z
2023-11-12T11:07:00.000Z
2023-11-12T11:07:00
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '1924' '1': '1925' '2': '1929' '3': '1930' splits: - name: train num_bytes: 44364265.0 num_examples: 800 - name: test num_bytes: 1116045.0 num_examples: 20 download_size: 38996336 dataset_size: 45480310.0 --- # Dataset Card for "bw_spec_cls_4_24_s_200" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6609888672828674, -0.018684787675738335, 0.2882879674434662, 0.5171388983726501, -0.19733238220214844, -0.014689170755445957, 0.003392522456124425, -0.15261371433734894, 0.5651556849479675, 0.4959110617637634, -0.9046046137809753, -0.744354248046875, -0.46959102153778076, -0.02901744470...
null
null
null
null
null
null
null
null
null
null
null
null
null
msaidkartal/denoise-prostateMRI
msaidkartal
2023-11-16T15:50:30Z
0
0
null
[ "region:us" ]
2023-11-16T15:50:30Z
2023-11-12T11:09:12.000Z
2023-11-12T11:09:12
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
sdasdadas/nov
sdasdadas
2023-11-12T11:09:28Z
0
0
null
[ "region:us" ]
2023-11-12T11:09:28Z
2023-11-12T11:09:28.000Z
2023-11-12T11:09:28
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
JoaoJunior/java-encoded-small
JoaoJunior
2023-11-12T11:20:30Z
0
0
null
[ "region:us" ]
2023-11-12T11:20:30Z
2023-11-12T11:20:22.000Z
2023-11-12T11:20:22
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: rem dtype: string - name: add dtype: string - name: context dtype: string - name: meta dtype: string - name: input_ids sequence: int32 - name: attention_mask sequence: int8 - name: labels sequence: int64 splits: - name: train num_bytes: 2551158 num_examples: 800 - name: test num_bytes: 641178 num_examples: 200 download_size: 391779 dataset_size: 3192336 --- # Dataset Card for "java-encoded-small" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5919890999794006, -0.3184680640697479, 0.2723858654499054, 0.07242731750011444, -0.31721097230911255, -0.2258743792772293, -0.10625752061605453, -0.26794639229774475, 0.9353885054588318, 0.516826868057251, -0.6559926271438599, -0.7133797407150269, -0.8145567774772644, -0.095183864235877...
null
null
null
null
null
null
null
null
null
null
null
null
null
Pablao0948/Nelson_Ned
Pablao0948
2023-11-12T11:34:01Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T11:34:01Z
2023-11-12T11:33:32.000Z
2023-11-12T11:33:32
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
bonobono612/codeparrot-valid
bonobono612
2023-11-12T11:34:11Z
0
0
null
[ "region:us" ]
2023-11-12T11:34:11Z
2023-11-12T11:34:11.000Z
2023-11-12T11:34:11
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
celsowm/ambito_juridico_artigos
celsowm
2023-11-12T23:11:50Z
0
0
null
[ "region:us" ]
2023-11-12T23:11:50Z
2023-11-12T12:24:47.000Z
2023-11-12T12:24:47
--- dataset_info: features: - name: titulo dtype: string - name: resumo dtype: string - name: categoria dtype: string - name: texto dtype: string - name: link dtype: string splits: - name: train num_bytes: 390296460 num_examples: 11373 download_size: 0 dataset_size: 390296460 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "ambito_juridico_artigos" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5168543457984924, -0.1968289315700531, 0.09452714771032333, 0.073697030544281, -0.4496545195579529, 0.19817237555980682, 0.11382137984037399, -0.28204989433288574, 1.2752985954284668, 0.6923308372497559, -0.6024848818778992, -0.8862519264221191, -0.5535463690757751, -0.2266455888748169,...
null
null
null
null
null
null
null
null
null
null
null
null
null
nourheshamshaheen/processed_chart_to_table_mix
nourheshamshaheen
2023-11-12T12:58:47Z
0
0
null
[ "region:us" ]
2023-11-12T12:58:47Z
2023-11-12T12:45:13.000Z
2023-11-12T12:45:13
--- dataset_info: features: - name: image dtype: image - name: text dtype: string - name: type dtype: string splits: - name: train num_bytes: 101062301.385 num_examples: 2245 - name: test num_bytes: 25059693.0 num_examples: 562 download_size: 108892113 dataset_size: 126121994.385 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* --- # Dataset Card for "processed_chart_to_table_mix" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5740682482719421, -0.3010896146297455, 0.1481054425239563, 0.44709253311157227, -0.348718523979187, 0.222720205783844, 0.33188924193382263, -0.3573542535305023, 0.9100584983825684, 0.8425918221473694, -0.6886478662490845, -0.8515757918357849, -0.6687929630279541, -0.47995996475219727, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
toilaluan/nike_laion
toilaluan
2023-11-12T13:30:35Z
0
0
null
[ "region:us" ]
2023-11-12T13:30:35Z
2023-11-12T12:59:17.000Z
2023-11-12T12:59:17
--- dataset_info: features: - name: image dtype: image - name: url dtype: string - name: caption dtype: string - name: id dtype: int64 - name: similarity dtype: float64 splits: - name: train num_bytes: 343403671.807 num_examples: 5117 download_size: 282913216 dataset_size: 343403671.807 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "nike_laion" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.3809514343738556, -0.0672537237405777, 0.11807630211114883, 0.17741547524929047, -0.288364052772522, -0.0033889226615428925, 0.22762539982795715, -0.0643651932477951, 0.7417449951171875, 0.28097665309906006, -0.9647209048271179, -0.878637969493866, -0.6031560897827148, -0.44743454456329...
null
null
null
null
null
null
null
null
null
null
null
null
null
nourheshamshaheen/new_typed_chart_to_table_mix
nourheshamshaheen
2023-11-12T13:21:09Z
0
0
null
[ "region:us" ]
2023-11-12T13:21:09Z
2023-11-12T13:21:06.000Z
2023-11-12T13:21:06
--- dataset_info: features: - name: image dtype: image - name: text dtype: string - name: type dtype: string splits: - name: test num_bytes: 25058693.0 num_examples: 562 download_size: 21501493 dataset_size: 25058693.0 configs: - config_name: default data_files: - split: test path: data/test-* --- # Dataset Card for "new_typed_chart_to_table_mix" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.645158052444458, -0.22195856273174286, 0.09398407489061356, 0.4135932922363281, -0.4115913212299347, 0.4073984920978546, 0.41913270950317383, -0.31995338201522827, 1.0107511281967163, 0.6879815459251404, -0.45125681161880493, -0.936941921710968, -0.7686922550201416, -0.44666171073913574...
null
null
null
null
null
null
null
null
null
null
null
null
null
nhlevy/president
nhlevy
2023-11-12T13:31:03Z
0
0
null
[ "region:us" ]
2023-11-12T13:31:03Z
2023-11-12T13:31:03.000Z
2023-11-12T13:31:03
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
nourheshamshaheen/temp
nourheshamshaheen
2023-11-12T14:34:44Z
0
0
null
[ "region:us" ]
2023-11-12T14:34:44Z
2023-11-12T14:34:41.000Z
2023-11-12T14:34:41
--- dataset_info: features: - name: image dtype: image - name: text dtype: string - name: type dtype: string splits: - name: test num_bytes: 25058975.0 num_examples: 562 download_size: 21501906 dataset_size: 25058975.0 configs: - config_name: default data_files: - split: test path: data/test-* --- # Dataset Card for "temp" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5142354369163513, -0.1874426305294037, 0.24313639104366302, 0.03319007530808449, -0.43021491169929504, -0.0007399906753562391, 0.17957881093025208, 0.008021633140742779, 0.8902769684791565, 0.4188593327999115, -0.7325683236122131, -0.7931426763534546, -0.5049940347671509, -0.32602640986...
null
null
null
null
null
null
null
null
null
null
null
null
null
BangumiBase/shoujokagekirevuestarlight
BangumiBase
2023-11-12T16:31:58Z
0
0
null
[ "size_categories:1K<n<10K", "license:mit", "art", "region:us" ]
2023-11-12T16:31:58Z
2023-11-12T14:38:55.000Z
2023-11-12T14:38:55
--- license: mit tags: - art size_categories: - 1K<n<10K --- # Bangumi Image Base of Shoujo Kageki Revue Starlight This is the image base of bangumi Shoujo Kageki Revue Starlight, we detected 55 characters, 3633 images in total. The full dataset is [here](all.zip). **Please note that these image bases are not guaranteed to be 100% cleaned, they may be noisy actual.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview: | # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 | |:------|---------:|:---------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------|:-------------------------------| | 0 | 65 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) | | 1 | 302 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) | | 2 | 201 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) | | 3 | 39 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | 
![preview 2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) | | 4 | 28 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) | | 5 | 46 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) | | 6 | 215 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) | | 7 | 17 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) | | 8 | 29 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) | | 9 | 10 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) | | 10 
| 21 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) | | 11 | 9 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) | | 12 | 189 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) | | 13 | 17 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) | | 14 | 214 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) | | 15 | 31 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) | | 16 | 42 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 4](16/preview_4.png) | 
![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) | | 17 | 151 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) | | 18 | 32 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) | | 19 | 43 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) | | 20 | 235 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | ![preview 8](20/preview_8.png) | | 21 | 27 | [Download](21/dataset.zip) | ![preview 1](21/preview_1.png) | ![preview 2](21/preview_2.png) | ![preview 3](21/preview_3.png) | ![preview 4](21/preview_4.png) | ![preview 5](21/preview_5.png) | ![preview 6](21/preview_6.png) | ![preview 7](21/preview_7.png) | ![preview 8](21/preview_8.png) | | 22 | 134 | [Download](22/dataset.zip) | ![preview 1](22/preview_1.png) | ![preview 2](22/preview_2.png) | ![preview 3](22/preview_3.png) | ![preview 4](22/preview_4.png) | ![preview 5](22/preview_5.png) | ![preview 6](22/preview_6.png) | ![preview 7](22/preview_7.png) | ![preview 8](22/preview_8.png) | | 23 | 12 | 
[Download](23/dataset.zip) | ![preview 1](23/preview_1.png) | ![preview 2](23/preview_2.png) | ![preview 3](23/preview_3.png) | ![preview 4](23/preview_4.png) | ![preview 5](23/preview_5.png) | ![preview 6](23/preview_6.png) | ![preview 7](23/preview_7.png) | ![preview 8](23/preview_8.png) | | 24 | 23 | [Download](24/dataset.zip) | ![preview 1](24/preview_1.png) | ![preview 2](24/preview_2.png) | ![preview 3](24/preview_3.png) | ![preview 4](24/preview_4.png) | ![preview 5](24/preview_5.png) | ![preview 6](24/preview_6.png) | ![preview 7](24/preview_7.png) | ![preview 8](24/preview_8.png) | | 25 | 11 | [Download](25/dataset.zip) | ![preview 1](25/preview_1.png) | ![preview 2](25/preview_2.png) | ![preview 3](25/preview_3.png) | ![preview 4](25/preview_4.png) | ![preview 5](25/preview_5.png) | ![preview 6](25/preview_6.png) | ![preview 7](25/preview_7.png) | ![preview 8](25/preview_8.png) | | 26 | 18 | [Download](26/dataset.zip) | ![preview 1](26/preview_1.png) | ![preview 2](26/preview_2.png) | ![preview 3](26/preview_3.png) | ![preview 4](26/preview_4.png) | ![preview 5](26/preview_5.png) | ![preview 6](26/preview_6.png) | ![preview 7](26/preview_7.png) | ![preview 8](26/preview_8.png) | | 27 | 14 | [Download](27/dataset.zip) | ![preview 1](27/preview_1.png) | ![preview 2](27/preview_2.png) | ![preview 3](27/preview_3.png) | ![preview 4](27/preview_4.png) | ![preview 5](27/preview_5.png) | ![preview 6](27/preview_6.png) | ![preview 7](27/preview_7.png) | ![preview 8](27/preview_8.png) | | 28 | 38 | [Download](28/dataset.zip) | ![preview 1](28/preview_1.png) | ![preview 2](28/preview_2.png) | ![preview 3](28/preview_3.png) | ![preview 4](28/preview_4.png) | ![preview 5](28/preview_5.png) | ![preview 6](28/preview_6.png) | ![preview 7](28/preview_7.png) | ![preview 8](28/preview_8.png) | | 29 | 18 | [Download](29/dataset.zip) | ![preview 1](29/preview_1.png) | ![preview 2](29/preview_2.png) | ![preview 3](29/preview_3.png) | ![preview 4](29/preview_4.png) | 
![preview 5](29/preview_5.png) | ![preview 6](29/preview_6.png) | ![preview 7](29/preview_7.png) | ![preview 8](29/preview_8.png) | | 30 | 215 | [Download](30/dataset.zip) | ![preview 1](30/preview_1.png) | ![preview 2](30/preview_2.png) | ![preview 3](30/preview_3.png) | ![preview 4](30/preview_4.png) | ![preview 5](30/preview_5.png) | ![preview 6](30/preview_6.png) | ![preview 7](30/preview_7.png) | ![preview 8](30/preview_8.png) | | 31 | 134 | [Download](31/dataset.zip) | ![preview 1](31/preview_1.png) | ![preview 2](31/preview_2.png) | ![preview 3](31/preview_3.png) | ![preview 4](31/preview_4.png) | ![preview 5](31/preview_5.png) | ![preview 6](31/preview_6.png) | ![preview 7](31/preview_7.png) | ![preview 8](31/preview_8.png) | | 32 | 29 | [Download](32/dataset.zip) | ![preview 1](32/preview_1.png) | ![preview 2](32/preview_2.png) | ![preview 3](32/preview_3.png) | ![preview 4](32/preview_4.png) | ![preview 5](32/preview_5.png) | ![preview 6](32/preview_6.png) | ![preview 7](32/preview_7.png) | ![preview 8](32/preview_8.png) | | 33 | 20 | [Download](33/dataset.zip) | ![preview 1](33/preview_1.png) | ![preview 2](33/preview_2.png) | ![preview 3](33/preview_3.png) | ![preview 4](33/preview_4.png) | ![preview 5](33/preview_5.png) | ![preview 6](33/preview_6.png) | ![preview 7](33/preview_7.png) | ![preview 8](33/preview_8.png) | | 34 | 20 | [Download](34/dataset.zip) | ![preview 1](34/preview_1.png) | ![preview 2](34/preview_2.png) | ![preview 3](34/preview_3.png) | ![preview 4](34/preview_4.png) | ![preview 5](34/preview_5.png) | ![preview 6](34/preview_6.png) | ![preview 7](34/preview_7.png) | ![preview 8](34/preview_8.png) | | 35 | 6 | [Download](35/dataset.zip) | ![preview 1](35/preview_1.png) | ![preview 2](35/preview_2.png) | ![preview 3](35/preview_3.png) | ![preview 4](35/preview_4.png) | ![preview 5](35/preview_5.png) | ![preview 6](35/preview_6.png) | N/A | N/A | | 36 | 38 | [Download](36/dataset.zip) | ![preview 1](36/preview_1.png) | ![preview 
2](36/preview_2.png) | ![preview 3](36/preview_3.png) | ![preview 4](36/preview_4.png) | ![preview 5](36/preview_5.png) | ![preview 6](36/preview_6.png) | ![preview 7](36/preview_7.png) | ![preview 8](36/preview_8.png) | | 37 | 9 | [Download](37/dataset.zip) | ![preview 1](37/preview_1.png) | ![preview 2](37/preview_2.png) | ![preview 3](37/preview_3.png) | ![preview 4](37/preview_4.png) | ![preview 5](37/preview_5.png) | ![preview 6](37/preview_6.png) | ![preview 7](37/preview_7.png) | ![preview 8](37/preview_8.png) | | 38 | 14 | [Download](38/dataset.zip) | ![preview 1](38/preview_1.png) | ![preview 2](38/preview_2.png) | ![preview 3](38/preview_3.png) | ![preview 4](38/preview_4.png) | ![preview 5](38/preview_5.png) | ![preview 6](38/preview_6.png) | ![preview 7](38/preview_7.png) | ![preview 8](38/preview_8.png) | | 39 | 10 | [Download](39/dataset.zip) | ![preview 1](39/preview_1.png) | ![preview 2](39/preview_2.png) | ![preview 3](39/preview_3.png) | ![preview 4](39/preview_4.png) | ![preview 5](39/preview_5.png) | ![preview 6](39/preview_6.png) | ![preview 7](39/preview_7.png) | ![preview 8](39/preview_8.png) | | 40 | 24 | [Download](40/dataset.zip) | ![preview 1](40/preview_1.png) | ![preview 2](40/preview_2.png) | ![preview 3](40/preview_3.png) | ![preview 4](40/preview_4.png) | ![preview 5](40/preview_5.png) | ![preview 6](40/preview_6.png) | ![preview 7](40/preview_7.png) | ![preview 8](40/preview_8.png) | | 41 | 45 | [Download](41/dataset.zip) | ![preview 1](41/preview_1.png) | ![preview 2](41/preview_2.png) | ![preview 3](41/preview_3.png) | ![preview 4](41/preview_4.png) | ![preview 5](41/preview_5.png) | ![preview 6](41/preview_6.png) | ![preview 7](41/preview_7.png) | ![preview 8](41/preview_8.png) | | 42 | 10 | [Download](42/dataset.zip) | ![preview 1](42/preview_1.png) | ![preview 2](42/preview_2.png) | ![preview 3](42/preview_3.png) | ![preview 4](42/preview_4.png) | ![preview 5](42/preview_5.png) | ![preview 6](42/preview_6.png) | ![preview 
7](42/preview_7.png) | ![preview 8](42/preview_8.png) | | 43 | 7 | [Download](43/dataset.zip) | ![preview 1](43/preview_1.png) | ![preview 2](43/preview_2.png) | ![preview 3](43/preview_3.png) | ![preview 4](43/preview_4.png) | ![preview 5](43/preview_5.png) | ![preview 6](43/preview_6.png) | ![preview 7](43/preview_7.png) | N/A | | 44 | 28 | [Download](44/dataset.zip) | ![preview 1](44/preview_1.png) | ![preview 2](44/preview_2.png) | ![preview 3](44/preview_3.png) | ![preview 4](44/preview_4.png) | ![preview 5](44/preview_5.png) | ![preview 6](44/preview_6.png) | ![preview 7](44/preview_7.png) | ![preview 8](44/preview_8.png) | | 45 | 42 | [Download](45/dataset.zip) | ![preview 1](45/preview_1.png) | ![preview 2](45/preview_2.png) | ![preview 3](45/preview_3.png) | ![preview 4](45/preview_4.png) | ![preview 5](45/preview_5.png) | ![preview 6](45/preview_6.png) | ![preview 7](45/preview_7.png) | ![preview 8](45/preview_8.png) | | 46 | 183 | [Download](46/dataset.zip) | ![preview 1](46/preview_1.png) | ![preview 2](46/preview_2.png) | ![preview 3](46/preview_3.png) | ![preview 4](46/preview_4.png) | ![preview 5](46/preview_5.png) | ![preview 6](46/preview_6.png) | ![preview 7](46/preview_7.png) | ![preview 8](46/preview_8.png) | | 47 | 28 | [Download](47/dataset.zip) | ![preview 1](47/preview_1.png) | ![preview 2](47/preview_2.png) | ![preview 3](47/preview_3.png) | ![preview 4](47/preview_4.png) | ![preview 5](47/preview_5.png) | ![preview 6](47/preview_6.png) | ![preview 7](47/preview_7.png) | ![preview 8](47/preview_8.png) | | 48 | 10 | [Download](48/dataset.zip) | ![preview 1](48/preview_1.png) | ![preview 2](48/preview_2.png) | ![preview 3](48/preview_3.png) | ![preview 4](48/preview_4.png) | ![preview 5](48/preview_5.png) | ![preview 6](48/preview_6.png) | ![preview 7](48/preview_7.png) | ![preview 8](48/preview_8.png) | | 49 | 50 | [Download](49/dataset.zip) | ![preview 1](49/preview_1.png) | ![preview 2](49/preview_2.png) | ![preview 3](49/preview_3.png) | 
![preview 4](49/preview_4.png) | ![preview 5](49/preview_5.png) | ![preview 6](49/preview_6.png) | ![preview 7](49/preview_7.png) | ![preview 8](49/preview_8.png) | | 50 | 38 | [Download](50/dataset.zip) | ![preview 1](50/preview_1.png) | ![preview 2](50/preview_2.png) | ![preview 3](50/preview_3.png) | ![preview 4](50/preview_4.png) | ![preview 5](50/preview_5.png) | ![preview 6](50/preview_6.png) | ![preview 7](50/preview_7.png) | ![preview 8](50/preview_8.png) | | 51 | 30 | [Download](51/dataset.zip) | ![preview 1](51/preview_1.png) | ![preview 2](51/preview_2.png) | ![preview 3](51/preview_3.png) | ![preview 4](51/preview_4.png) | ![preview 5](51/preview_5.png) | ![preview 6](51/preview_6.png) | ![preview 7](51/preview_7.png) | ![preview 8](51/preview_8.png) | | 52 | 60 | [Download](52/dataset.zip) | ![preview 1](52/preview_1.png) | ![preview 2](52/preview_2.png) | ![preview 3](52/preview_3.png) | ![preview 4](52/preview_4.png) | ![preview 5](52/preview_5.png) | ![preview 6](52/preview_6.png) | ![preview 7](52/preview_7.png) | ![preview 8](52/preview_8.png) | | 53 | 37 | [Download](53/dataset.zip) | ![preview 1](53/preview_1.png) | ![preview 2](53/preview_2.png) | ![preview 3](53/preview_3.png) | ![preview 4](53/preview_4.png) | ![preview 5](53/preview_5.png) | ![preview 6](53/preview_6.png) | ![preview 7](53/preview_7.png) | ![preview 8](53/preview_8.png) | | noise | 315 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
[ -0.6870681643486023, -0.14234180748462677, 0.13797248899936676, 0.2169700413942337, -0.2506984770298004, -0.0755891427397728, -0.0659128725528717, -0.36988601088523865, 0.6307777762413025, 0.5385001301765442, -0.9247044324874878, -0.86280357837677, -0.6595712900161743, 0.5120773315429688, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
NickKolok/regs-lametta-beastboost
NickKolok
2023-11-24T12:15:24Z
0
0
null
[ "license:agpl-3.0", "region:us" ]
2023-11-24T12:15:24Z
2023-11-12T14:39:54.000Z
2023-11-12T14:39:54
--- license: agpl-3.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
Azie88/COVID_Vaccine_Tweet_sentiment_analysis_Distilbert
Azie88
2023-11-12T14:55:48Z
0
0
null
[ "region:us" ]
2023-11-12T14:55:48Z
2023-11-12T14:55:45.000Z
2023-11-12T14:55:45
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: eval path: data/eval-* dataset_info: features: - name: input_ids sequence: int32 - name: attention_mask sequence: int8 - name: labels dtype: int64 splits: - name: train num_bytes: 1712789 num_examples: 7999 - name: eval num_bytes: 472000 num_examples: 2000 download_size: 505986 dataset_size: 2184789 --- # Dataset Card for "COVID_Vaccine_Tweet_sentiment_analysis_Distilbert" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.32166069746017456, -0.4138492941856384, -0.02872658148407936, 0.4444462060928345, -0.3147890567779541, 0.22228564321994781, 0.24801144003868103, 0.1511923372745514, 0.8182873725891113, -0.153045654296875, -0.8116793036460876, -1.0009751319885254, -0.8672338128089905, -0.2390673458576202...
null
null
null
null
null
null
null
null
null
null
null
null
null
Norod78/pringles-blip-captions
Norod78
2023-11-12T18:36:09Z
0
0
null
[ "region:us" ]
2023-11-12T18:36:09Z
2023-11-12T14:57:18.000Z
2023-11-12T14:57:18
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: text dtype: string splits: - name: train num_bytes: 221312765.0 num_examples: 138 download_size: 221194479 dataset_size: 221312765.0 --- # Dataset Card for "pringles-blip-captions" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.42627331614494324, -0.059538476169109344, 0.1847349852323532, 0.5329986810684204, -0.3131306767463684, 0.026605790480971336, -0.07083245366811752, -0.009118268266320229, 0.6297727227210999, 0.6425506472587585, -0.6542299389839172, -0.6162760257720947, -0.5586721897125244, 0.026561260223...
null
null
null
null
null
null
null
null
null
null
null
null
null
turkish-nlp-suite/Havadis
turkish-nlp-suite
2023-11-27T13:28:32Z
0
1
null
[ "task_categories:text-generation", "task_categories:summarization", "task_categories:fill-mask", "size_categories:100K<n<1M", "language:tr", "license:cc-by-sa-4.0", "newspaper", "region:us" ]
2023-11-27T13:28:32Z
2023-11-12T15:36:02.000Z
2023-11-12T15:36:02
--- license: cc-by-sa-4.0 task_categories: - text-generation - summarization - fill-mask language: - tr tags: - newspaper pretty_name: Havadis size_categories: - 100K<n<1M ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
nourheshamshaheen/typed_final_chart_to_table
nourheshamshaheen
2023-11-12T15:42:12Z
0
0
null
[ "region:us" ]
2023-11-12T15:42:12Z
2023-11-12T15:38:39.000Z
2023-11-12T15:38:39
--- dataset_info: features: - name: image dtype: image - name: text dtype: string - name: type dtype: string splits: - name: test num_bytes: 25058975.0 num_examples: 562 - name: train num_bytes: 101059498.385 num_examples: 2245 download_size: 108892387 dataset_size: 126118473.385 configs: - config_name: default data_files: - split: test path: data/test-* - split: train path: data/train-* --- # Dataset Card for "typed_final_chart_to_table" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.44622471928596497, -0.2643173933029175, 0.41774922609329224, 0.19233787059783936, -0.4279243052005768, 0.18374553322792053, 0.2460070550441742, -0.13159292936325073, 0.7715935111045837, 0.7962278723716736, -0.5583736896514893, -1.0293867588043213, -0.7338480353355408, -0.349274903535842...
null
null
null
null
null
null
null
null
null
null
null
null
null
nourheshamshaheen/final_chart_to_table
nourheshamshaheen
2023-11-12T15:42:54Z
0
0
null
[ "region:us" ]
2023-11-12T15:42:54Z
2023-11-12T15:42:27.000Z
2023-11-12T15:42:27
--- dataset_info: features: - name: image dtype: image - name: text dtype: string - name: type dtype: string splits: - name: train num_bytes: 101059151.385 num_examples: 2245 - name: test num_bytes: 25058843.0 num_examples: 562 download_size: 108890579 dataset_size: 126117994.385 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* --- # Dataset Card for "final_chart_to_table" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5340485572814941, -0.24294787645339966, 0.42568811774253845, 0.12648552656173706, -0.388469398021698, 0.016836252063512802, 0.2667716443538666, -0.19567018747329712, 0.71126389503479, 0.8466155529022217, -0.6729673147201538, -1.0034176111221313, -0.6741611361503601, -0.3694758415222168,...
null
null
null
null
null
null
null
null
null
null
null
null
null
naufalnashif/tweets-biskita-transpakuan-2022
naufalnashif
2023-11-12T15:47:45Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-12T15:47:45Z
2023-11-12T15:47:45.000Z
2023-11-12T15:47:45
--- license: mit ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
orgcatorg/stripes
orgcatorg
2023-11-28T04:09:34Z
0
0
null
[ "region:us" ]
2023-11-28T04:09:34Z
2023-11-12T16:15:12.000Z
2023-11-12T16:15:12
--- configs: - config_name: Africa data_files: - split: train path: Africa/train-* - config_name: Asia-Pacific data_files: - split: train path: Asia-Pacific/train-* - config_name: Europe data_files: - split: train path: Europe/train-* - config_name: Middle East data_files: - split: train path: Middle East/train-* - config_name: US data_files: - split: train path: US/train-* dataset_info: - config_name: Africa features: - name: content dtype: string - name: title dtype: string - name: source_link dtype: string - name: description dtype: string - name: date dtype: timestamp[ns] - name: image dtype: string - name: image_caption dtype: string - name: category dtype: string splits: - name: train num_bytes: 190008 num_examples: 32 download_size: 134781 dataset_size: 190008 - config_name: Asia-Pacific features: - name: content dtype: string - name: title dtype: string - name: source_link dtype: string - name: description dtype: string - name: date dtype: timestamp[ns] - name: image dtype: string - name: image_caption dtype: string - name: category dtype: string splits: - name: train num_bytes: 380004 num_examples: 83 download_size: 247183 dataset_size: 380004 - config_name: Europe features: - name: content dtype: string - name: title dtype: string - name: source_link dtype: string - name: description dtype: string - name: date dtype: timestamp[ns] - name: image dtype: string - name: image_caption dtype: string - name: category dtype: string splits: - name: train num_bytes: 768040 num_examples: 154 download_size: 467395 dataset_size: 768040 - config_name: Middle East features: - name: content dtype: string - name: title dtype: string - name: source_link dtype: string - name: description dtype: string - name: date dtype: timestamp[ns] - name: image dtype: string - name: image_caption dtype: string - name: category dtype: string splits: - name: train num_bytes: 915950 num_examples: 126 download_size: 537837 dataset_size: 915950 - config_name: US features: - name: content 
dtype: string - name: title dtype: string - name: source_link dtype: string - name: description dtype: string - name: date dtype: timestamp[ns] - name: image dtype: string - name: image_caption dtype: string - name: category dtype: string splits: - name: train num_bytes: 849591 num_examples: 143 download_size: 534513 dataset_size: 849591 --- # Dataset Card for "stripes" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7109739184379578, -0.13575944304466248, 0.3276408910751343, 0.33754462003707886, -0.3843163549900055, 0.2775147557258606, 0.4708556830883026, -0.3704468309879303, 1.1450635194778442, 0.6847497820854187, -0.8951026797294617, -0.7842528223991394, -0.4223686754703522, -0.5079256296157837, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
hvpd/temp
hvpd
2023-11-14T11:07:42Z
0
0
null
[ "license:unknown", "region:us" ]
2023-11-14T11:07:42Z
2023-11-12T16:16:34.000Z
2023-11-12T16:16:34
--- license: unknown ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
malucoelhaofc/ScottTenormanEnglish
malucoelhaofc
2023-11-12T17:58:42Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T17:58:42Z
2023-11-12T16:22:38.000Z
2023-11-12T16:22:38
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Kaue123456/JuniorRangelPatata
Kaue123456
2023-11-12T16:35:02Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T16:35:02Z
2023-11-12T16:33:09.000Z
2023-11-12T16:33:09
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
matrix-multiply/BeyondSimulatedDrivers
matrix-multiply
2023-11-14T14:41:57Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-14T14:41:57Z
2023-11-12T16:35:35.000Z
2023-11-12T16:35:35
--- license: mit ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
sdadasfgdfgfdg/sasukemodel
sdadasfgdfgfdg
2023-11-12T16:40:57Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T16:40:57Z
2023-11-12T16:40:07.000Z
2023-11-12T16:40:07
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Pablao0948/Milhouse
Pablao0948
2023-11-12T17:08:19Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T17:08:19Z
2023-11-12T17:07:45.000Z
2023-11-12T17:07:45
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Kyle1668/LLM-TTA-Cached-Rewrites
Kyle1668
2023-11-28T20:47:37Z
0
0
null
[ "region:us" ]
2023-11-28T20:47:37Z
2023-11-12T17:49:43.000Z
2023-11-12T17:49:43
--- configs: - config_name: default data_files: - split: boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0 path: data/boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0-* - split: boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0 path: data/boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0-* - split: boss_toxicity_aug_back_translate path: data/boss_toxicity_aug_back_translate-* - split: boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0 path: data/boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0-* - split: boss_sentiment_aug_back_translate path: data/boss_sentiment_aug_back_translate-* - split: boss_toxicity_aug_substitute path: data/boss_toxicity_aug_substitute-* - split: boss_sentiment_aug_substitute path: data/boss_sentiment_aug_substitute-* - split: ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0 path: data/ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0-* - split: ag_news_twitter_aug_insert path: data/ag_news_twitter_aug_insert-* - split: ag_news_twitter_aug_substitute path: data/ag_news_twitter_aug_substitute-* - split: boss_toxicity_aug_insert path: data/boss_toxicity_aug_insert-* - split: ag_news_twitter_aug_back_translate path: data/ag_news_twitter_aug_back_translate-* - split: boss_sentiment_aug_insert path: data/boss_sentiment_aug_insert-* dataset_info: features: - name: prompt_hash dtype: string - name: prompt dtype: string - name: rewrites sequence: string splits: - name: boss_sentiment_stabilityai_StableBeluga_13B_tempequals0dot0 num_bytes: 3481889 num_examples: 2132 - name: boss_sentiment_stabilityai_StableBeluga_7b_tempequals0dot0 num_bytes: 262454218 num_examples: 123097 - name: boss_toxicity_aug_back_translate num_bytes: 149835802 num_examples: 116801 - name: boss_toxicity_stabilityai_StableBeluga_7b_tempequals0dot0 num_bytes: 334255552 num_examples: 90326 - name: boss_sentiment_aug_back_translate num_bytes: 61495482 num_examples: 61580 - name: 
boss_toxicity_aug_substitute num_bytes: 168666583 num_examples: 120032 - name: boss_sentiment_aug_substitute num_bytes: 76973335 num_examples: 61580 - name: ag_news_twitter_stabilityai_StableBeluga_7b_tempequals0dot0 num_bytes: 76061269 num_examples: 30400 - name: ag_news_twitter_aug_insert num_bytes: 22409159 num_examples: 15200 - name: ag_news_twitter_aug_substitute num_bytes: 18601890 num_examples: 15200 - name: boss_toxicity_aug_insert num_bytes: 190629217 num_examples: 120032 - name: ag_news_twitter_aug_back_translate num_bytes: 17617207 num_examples: 15200 - name: boss_sentiment_aug_insert num_bytes: 87047048 num_examples: 61580 download_size: 393146130 dataset_size: 1469528651 --- # Dataset Card for "LLM-TTA-Cached-Rewrites" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.17310675978660583, -0.608298122882843, 0.4077690541744232, 0.25082364678382874, -0.5028505325317383, 0.3480721712112427, 0.3190561830997467, 0.0008956932579167187, 1.1538994312286377, 0.7232703566551208, -1.0607703924179077, -0.8021167516708374, -0.5202480554580688, 0.015290706418454647...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public
open-llm-leaderboard
2023-11-12T17:55:40Z
0
0
null
[ "region:us" ]
2023-11-12T17:55:40Z
2023-11-12T17:54:40.000Z
2023-11-12T17:54:40
--- pretty_name: Evaluation run of revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE](https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T17:51:35.598056](https://huggingface.co/datasets/open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public/blob/main/results_2023-11-12T17-51-35.598056.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.48664997345720373,\n\ \ \"acc_stderr\": 0.03427289794847252,\n \"acc_norm\": 0.4932299888431757,\n\ \ \"acc_norm_stderr\": 0.03508094254293674,\n \"mc1\": 0.2876376988984088,\n\ \ \"mc1_stderr\": 0.015846315101394812,\n \"mc2\": 0.44729919889234016,\n\ \ \"mc2_stderr\": 0.015286276115878357,\n \"em\": 0.010906040268456376,\n\ \ \"em_stderr\": 0.0010636334198498001,\n \"f1\": 0.06768770973154396,\n\ \ \"f1_stderr\": 0.0017077194500790263\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.4906143344709898,\n \"acc_stderr\": 0.014608816322065,\n\ \ \"acc_norm\": 0.5307167235494881,\n \"acc_norm_stderr\": 0.014583792546304037\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5622385978888668,\n\ \ \"acc_stderr\": 0.004950973231188739,\n \"acc_norm\": 0.7559251145190201,\n\ \ \"acc_norm_stderr\": 0.004286594977390899\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \ \ \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.42962962962962964,\n\ \ \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.42962962962962964,\n\ \ \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.040601270352363966,\n\ \ \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.040601270352363966\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n\ \ \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.539622641509434,\n \"acc_stderr\": 0.030676096599389184,\n\ \ \"acc_norm\": 0.539622641509434,\n \"acc_norm_stderr\": 0.030676096599389184\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n 
\"acc\": 0.5347222222222222,\n\ \ \"acc_stderr\": 0.04171115858181618,\n \"acc_norm\": 0.5347222222222222,\n\ \ \"acc_norm_stderr\": 0.04171115858181618\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n\ \ \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.4046242774566474,\n\ \ \"acc_stderr\": 0.03742461193887248,\n \"acc_norm\": 0.4046242774566474,\n\ \ \"acc_norm_stderr\": 0.03742461193887248\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.041583075330832865,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.041583075330832865\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n\ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.42127659574468085,\n \"acc_stderr\": 0.03227834510146268,\n\ \ \"acc_norm\": 0.42127659574468085,\n \"acc_norm_stderr\": 0.03227834510146268\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.37719298245614036,\n\ \ \"acc_stderr\": 0.045595221419582166,\n \"acc_norm\": 0.37719298245614036,\n\ \ \"acc_norm_stderr\": 0.045595221419582166\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.503448275862069,\n \"acc_stderr\": 0.041665675771015785,\n\ \ \"acc_norm\": 0.503448275862069,\n \"acc_norm_stderr\": 0.041665675771015785\n\ \ },\n 
\"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.2962962962962963,\n \"acc_stderr\": 0.023517294335963286,\n \"\ acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.023517294335963286\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n\ \ \"acc_stderr\": 0.03852273364924314,\n \"acc_norm\": 0.24603174603174602,\n\ \ \"acc_norm_stderr\": 0.03852273364924314\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.532258064516129,\n \"acc_stderr\": 0.028384747788813332,\n \"\ acc_norm\": 0.532258064516129,\n \"acc_norm_stderr\": 0.028384747788813332\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.35960591133004927,\n \"acc_stderr\": 0.03376458246509566,\n \"\ acc_norm\": 0.35960591133004927,\n \"acc_norm_stderr\": 0.03376458246509566\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\"\ : 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.5696969696969697,\n \"acc_stderr\": 0.03866225962879077,\n\ \ \"acc_norm\": 0.5696969696969697,\n \"acc_norm_stderr\": 0.03866225962879077\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.5909090909090909,\n \"acc_stderr\": 0.03502975799413007,\n \"\ acc_norm\": 0.5909090909090909,\n \"acc_norm_stderr\": 0.03502975799413007\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7098445595854922,\n \"acc_stderr\": 0.03275264467791516,\n\ \ \"acc_norm\": 0.7098445595854922,\n \"acc_norm_stderr\": 0.03275264467791516\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 
0.4230769230769231,\n \"acc_stderr\": 0.02504919787604234,\n \ \ \"acc_norm\": 0.4230769230769231,\n \"acc_norm_stderr\": 0.02504919787604234\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.02696242432507383,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.02696242432507383\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.4369747899159664,\n \"acc_stderr\": 0.03221943636566196,\n \ \ \"acc_norm\": 0.4369747899159664,\n \"acc_norm_stderr\": 0.03221943636566196\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.671559633027523,\n\ \ \"acc_stderr\": 0.02013590279729841,\n \"acc_norm\": 0.671559633027523,\n\ \ \"acc_norm_stderr\": 0.02013590279729841\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.3287037037037037,\n \"acc_stderr\": 0.032036140846700596,\n\ \ \"acc_norm\": 0.3287037037037037,\n \"acc_norm_stderr\": 0.032036140846700596\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.6813725490196079,\n \"acc_stderr\": 0.032702871814820796,\n \"\ acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.032702871814820796\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6708860759493671,\n \"acc_stderr\": 0.03058732629470237,\n \ \ \"acc_norm\": 0.6708860759493671,\n \"acc_norm_stderr\": 0.03058732629470237\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.5874439461883408,\n\ \ \"acc_stderr\": 0.03304062175449297,\n \"acc_norm\": 0.5874439461883408,\n\ \ \"acc_norm_stderr\": 0.03304062175449297\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5877862595419847,\n \"acc_stderr\": 
0.04317171194870255,\n\ \ \"acc_norm\": 0.5877862595419847,\n \"acc_norm_stderr\": 0.04317171194870255\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.6446280991735537,\n \"acc_stderr\": 0.0436923632657398,\n \"acc_norm\"\ : 0.6446280991735537,\n \"acc_norm_stderr\": 0.0436923632657398\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6111111111111112,\n\ \ \"acc_stderr\": 0.04712821257426769,\n \"acc_norm\": 0.6111111111111112,\n\ \ \"acc_norm_stderr\": 0.04712821257426769\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.5521472392638037,\n \"acc_stderr\": 0.03906947479456606,\n\ \ \"acc_norm\": 0.5521472392638037,\n \"acc_norm_stderr\": 0.03906947479456606\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3482142857142857,\n\ \ \"acc_stderr\": 0.04521829902833586,\n \"acc_norm\": 0.3482142857142857,\n\ \ \"acc_norm_stderr\": 0.04521829902833586\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6893203883495146,\n \"acc_stderr\": 0.045821241601615506,\n\ \ \"acc_norm\": 0.6893203883495146,\n \"acc_norm_stderr\": 0.045821241601615506\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7222222222222222,\n\ \ \"acc_stderr\": 0.02934311479809446,\n \"acc_norm\": 0.7222222222222222,\n\ \ \"acc_norm_stderr\": 0.02934311479809446\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \ \ \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.6704980842911877,\n\ \ \"acc_stderr\": 0.01680832226174046,\n \"acc_norm\": 0.6704980842911877,\n\ \ \"acc_norm_stderr\": 0.01680832226174046\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5202312138728323,\n \"acc_stderr\": 0.026897049996382875,\n\ \ \"acc_norm\": 0.5202312138728323,\n \"acc_norm_stderr\": 0.026897049996382875\n\ \ },\n 
\"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2223463687150838,\n\ \ \"acc_stderr\": 0.013907189208156881,\n \"acc_norm\": 0.2223463687150838,\n\ \ \"acc_norm_stderr\": 0.013907189208156881\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5098039215686274,\n \"acc_stderr\": 0.028624412550167958,\n\ \ \"acc_norm\": 0.5098039215686274,\n \"acc_norm_stderr\": 0.028624412550167958\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.572347266881029,\n\ \ \"acc_stderr\": 0.02809924077580956,\n \"acc_norm\": 0.572347266881029,\n\ \ \"acc_norm_stderr\": 0.02809924077580956\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.027513747284379428,\n\ \ \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.027513747284379428\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.3723404255319149,\n \"acc_stderr\": 0.02883892147125146,\n \ \ \"acc_norm\": 0.3723404255319149,\n \"acc_norm_stderr\": 0.02883892147125146\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34615384615384615,\n\ \ \"acc_stderr\": 0.012150699768228556,\n \"acc_norm\": 0.34615384615384615,\n\ \ \"acc_norm_stderr\": 0.012150699768228556\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.4742647058823529,\n \"acc_stderr\": 0.03033257809455504,\n\ \ \"acc_norm\": 0.4742647058823529,\n \"acc_norm_stderr\": 0.03033257809455504\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.4869281045751634,\n \"acc_stderr\": 0.020220920829626916,\n \ \ \"acc_norm\": 0.4869281045751634,\n \"acc_norm_stderr\": 0.020220920829626916\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5272727272727272,\n\ \ \"acc_stderr\": 0.04782001791380061,\n \"acc_norm\": 0.5272727272727272,\n\ \ \"acc_norm_stderr\": 0.04782001791380061\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5306122448979592,\n 
\"acc_stderr\": 0.031949171367580624,\n\ \ \"acc_norm\": 0.5306122448979592,\n \"acc_norm_stderr\": 0.031949171367580624\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6517412935323383,\n\ \ \"acc_stderr\": 0.033687874661154596,\n \"acc_norm\": 0.6517412935323383,\n\ \ \"acc_norm_stderr\": 0.033687874661154596\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.41566265060240964,\n\ \ \"acc_stderr\": 0.03836722176598052,\n \"acc_norm\": 0.41566265060240964,\n\ \ \"acc_norm_stderr\": 0.03836722176598052\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7251461988304093,\n \"acc_stderr\": 0.034240429246915824,\n\ \ \"acc_norm\": 0.7251461988304093,\n \"acc_norm_stderr\": 0.034240429246915824\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2876376988984088,\n\ \ \"mc1_stderr\": 0.015846315101394812,\n \"mc2\": 0.44729919889234016,\n\ \ \"mc2_stderr\": 0.015286276115878357\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7324388318863457,\n \"acc_stderr\": 0.01244171845689301\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.010906040268456376,\n \ \ \"em_stderr\": 0.0010636334198498001,\n \"f1\": 0.06768770973154396,\n\ \ \"f1_stderr\": 0.0017077194500790263\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.08642911296436695,\n \"acc_stderr\": 0.007740044337103787\n\ \ }\n}\n```" repo_url: https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|arc:challenge|25_2023-11-12T17-51-35.598056.parquet' - split: latest path: - 
'**/details_harness|arc:challenge|25_2023-11-12T17-51-35.598056.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|drop|3_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T17-51-35.598056.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|gsm8k|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hellaswag|10_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet' - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet' - 
'**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet' - 
'**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet' - 
'**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 
data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-world_religions|5_2023-11-12T17-51-35.598056.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T17-51-35.598056.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T17_51_35.598056 path: - '**/details_harness|winogrande|5_2023-11-12T17-51-35.598056.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T17-51-35.598056.parquet' - config_name: results data_files: - split: 2023_11_12T17_51_35.598056 path: - results_2023-11-12T17-51-35.598056.parquet - split: latest path: - results_2023-11-12T17-51-35.598056.parquet --- # Dataset Card for Evaluation run of revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE](https://huggingface.co/revolutionarybukhari/Llama-2-7b-chat-finetune-AUTOMATE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. 
An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T17:51:35.598056](https://huggingface.co/datasets/open-llm-leaderboard/details_revolutionarybukhari__Llama-2-7b-chat-finetune-AUTOMATE_public/blob/main/results_2023-11-12T17-51-35.598056.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.48664997345720373, "acc_stderr": 0.03427289794847252, "acc_norm": 0.4932299888431757, "acc_norm_stderr": 0.03508094254293674, "mc1": 0.2876376988984088, "mc1_stderr": 0.015846315101394812, "mc2": 0.44729919889234016, "mc2_stderr": 0.015286276115878357, "em": 0.010906040268456376, "em_stderr": 0.0010636334198498001, "f1": 0.06768770973154396, "f1_stderr": 0.0017077194500790263 }, "harness|arc:challenge|25": { "acc": 0.4906143344709898, "acc_stderr": 0.014608816322065, "acc_norm": 0.5307167235494881, "acc_norm_stderr": 0.014583792546304037 }, "harness|hellaswag|10": { "acc": 0.5622385978888668, "acc_stderr": 0.004950973231188739, "acc_norm": 0.7559251145190201, "acc_norm_stderr": 0.004286594977390899 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.42962962962962964, "acc_stderr": 0.04276349494376599, "acc_norm": 0.42962962962962964, "acc_norm_stderr": 
0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.539622641509434, "acc_stderr": 0.030676096599389184, "acc_norm": 0.539622641509434, "acc_norm_stderr": 0.030676096599389184 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.5347222222222222, "acc_stderr": 0.04171115858181618, "acc_norm": 0.5347222222222222, "acc_norm_stderr": 0.04171115858181618 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.4046242774566474, "acc_stderr": 0.03742461193887248, "acc_norm": 0.4046242774566474, "acc_norm_stderr": 0.03742461193887248 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.22549019607843138, "acc_stderr": 0.041583075330832865, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.041583075330832865 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.42127659574468085, "acc_stderr": 0.03227834510146268, "acc_norm": 0.42127659574468085, "acc_norm_stderr": 0.03227834510146268 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.37719298245614036, "acc_stderr": 
0.045595221419582166, "acc_norm": 0.37719298245614036, "acc_norm_stderr": 0.045595221419582166 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.503448275862069, "acc_stderr": 0.041665675771015785, "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.023517294335963286, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.023517294335963286 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.03852273364924314, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.03852273364924314 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.532258064516129, "acc_stderr": 0.028384747788813332, "acc_norm": 0.532258064516129, "acc_norm_stderr": 0.028384747788813332 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.35960591133004927, "acc_stderr": 0.03376458246509566, "acc_norm": 0.35960591133004927, "acc_norm_stderr": 0.03376458246509566 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5696969696969697, "acc_stderr": 0.03866225962879077, "acc_norm": 0.5696969696969697, "acc_norm_stderr": 0.03866225962879077 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5909090909090909, "acc_stderr": 0.03502975799413007, "acc_norm": 0.5909090909090909, "acc_norm_stderr": 0.03502975799413007 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7098445595854922, "acc_stderr": 0.03275264467791516, "acc_norm": 0.7098445595854922, "acc_norm_stderr": 0.03275264467791516 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 
0.4230769230769231, "acc_stderr": 0.02504919787604234, "acc_norm": 0.4230769230769231, "acc_norm_stderr": 0.02504919787604234 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.02696242432507383, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.02696242432507383 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.4369747899159664, "acc_stderr": 0.03221943636566196, "acc_norm": 0.4369747899159664, "acc_norm_stderr": 0.03221943636566196 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.671559633027523, "acc_stderr": 0.02013590279729841, "acc_norm": 0.671559633027523, "acc_norm_stderr": 0.02013590279729841 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3287037037037037, "acc_stderr": 0.032036140846700596, "acc_norm": 0.3287037037037037, "acc_norm_stderr": 0.032036140846700596 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.6813725490196079, "acc_stderr": 0.032702871814820796, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.032702871814820796 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6708860759493671, "acc_stderr": 0.03058732629470237, "acc_norm": 0.6708860759493671, "acc_norm_stderr": 0.03058732629470237 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.5874439461883408, "acc_stderr": 0.03304062175449297, "acc_norm": 0.5874439461883408, "acc_norm_stderr": 0.03304062175449297 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5877862595419847, "acc_stderr": 0.04317171194870255, "acc_norm": 0.5877862595419847, "acc_norm_stderr": 0.04317171194870255 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6446280991735537, "acc_stderr": 0.0436923632657398, "acc_norm": 0.6446280991735537, "acc_norm_stderr": 0.0436923632657398 }, 
"harness|hendrycksTest-jurisprudence|5": { "acc": 0.6111111111111112, "acc_stderr": 0.04712821257426769, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.04712821257426769 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.5521472392638037, "acc_stderr": 0.03906947479456606, "acc_norm": 0.5521472392638037, "acc_norm_stderr": 0.03906947479456606 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3482142857142857, "acc_stderr": 0.04521829902833586, "acc_norm": 0.3482142857142857, "acc_norm_stderr": 0.04521829902833586 }, "harness|hendrycksTest-management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.045821241601615506, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.045821241601615506 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7222222222222222, "acc_stderr": 0.02934311479809446, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.02934311479809446 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.6704980842911877, "acc_stderr": 0.01680832226174046, "acc_norm": 0.6704980842911877, "acc_norm_stderr": 0.01680832226174046 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5202312138728323, "acc_stderr": 0.026897049996382875, "acc_norm": 0.5202312138728323, "acc_norm_stderr": 0.026897049996382875 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2223463687150838, "acc_stderr": 0.013907189208156881, "acc_norm": 0.2223463687150838, "acc_norm_stderr": 0.013907189208156881 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5098039215686274, "acc_stderr": 0.028624412550167958, "acc_norm": 0.5098039215686274, "acc_norm_stderr": 0.028624412550167958 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.572347266881029, "acc_stderr": 0.02809924077580956, "acc_norm": 0.572347266881029, "acc_norm_stderr": 0.02809924077580956 }, "harness|hendrycksTest-prehistory|5": { "acc": 
0.5740740740740741, "acc_stderr": 0.027513747284379428, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.027513747284379428 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3723404255319149, "acc_stderr": 0.02883892147125146, "acc_norm": 0.3723404255319149, "acc_norm_stderr": 0.02883892147125146 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.34615384615384615, "acc_stderr": 0.012150699768228556, "acc_norm": 0.34615384615384615, "acc_norm_stderr": 0.012150699768228556 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4742647058823529, "acc_stderr": 0.03033257809455504, "acc_norm": 0.4742647058823529, "acc_norm_stderr": 0.03033257809455504 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4869281045751634, "acc_stderr": 0.020220920829626916, "acc_norm": 0.4869281045751634, "acc_norm_stderr": 0.020220920829626916 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5272727272727272, "acc_stderr": 0.04782001791380061, "acc_norm": 0.5272727272727272, "acc_norm_stderr": 0.04782001791380061 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5306122448979592, "acc_stderr": 0.031949171367580624, "acc_norm": 0.5306122448979592, "acc_norm_stderr": 0.031949171367580624 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6517412935323383, "acc_stderr": 0.033687874661154596, "acc_norm": 0.6517412935323383, "acc_norm_stderr": 0.033687874661154596 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-virology|5": { "acc": 0.41566265060240964, "acc_stderr": 0.03836722176598052, "acc_norm": 0.41566265060240964, "acc_norm_stderr": 0.03836722176598052 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7251461988304093, "acc_stderr": 0.034240429246915824, "acc_norm": 0.7251461988304093, "acc_norm_stderr": 0.034240429246915824 }, "harness|truthfulqa:mc|0": { "mc1": 0.2876376988984088, 
"mc1_stderr": 0.015846315101394812, "mc2": 0.44729919889234016, "mc2_stderr": 0.015286276115878357 }, "harness|winogrande|5": { "acc": 0.7324388318863457, "acc_stderr": 0.01244171845689301 }, "harness|drop|3": { "em": 0.010906040268456376, "em_stderr": 0.0010636334198498001, "f1": 0.06768770973154396, "f1_stderr": 0.0017077194500790263 }, "harness|gsm8k|5": { "acc": 0.08642911296436695, "acc_stderr": 0.007740044337103787 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7103564739227295, -0.9015923142433167, 0.23576615750789642, 0.2283712476491928, -0.18927030265331268, -0.00860503874719143, 0.006226790603250265, -0.2155311405658722, 0.5696914792060852, -0.029724350199103355, -0.5033058524131775, -0.6728972792625427, -0.4398886561393738, 0.232522994279...
null
null
null
null
null
null
null
null
null
null
null
null
null
BangumiBase/gintama
BangumiBase
2023-11-13T10:01:43Z
0
0
null
[ "size_categories:10K<n<100K", "license:mit", "art", "region:us" ]
2023-11-13T10:01:43Z
2023-11-12T18:12:25.000Z
2023-11-12T18:12:25
--- license: mit tags: - art size_categories: - 10K<n<100K --- # Bangumi Image Base of Gintama This is the image base of bangumi Gintama, we detected 225 characters, 31678 images in total. The full dataset is [here](all.zip). **Please note that these image bases are not guaranteed to be 100% cleaned, they may be noisy actual.** If you intend to manually train models using this dataset, we recommend performing necessary preprocessing on the downloaded dataset to eliminate potential noisy samples (approximately 1% probability). Here is the characters' preview: | # | Images | Download | Preview 1 | Preview 2 | Preview 3 | Preview 4 | Preview 5 | Preview 6 | Preview 7 | Preview 8 | |:------|---------:|:----------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------|:--------------------------------| | 0 | 2887 | [Download](0/dataset.zip) | ![preview 1](0/preview_1.png) | ![preview 2](0/preview_2.png) | ![preview 3](0/preview_3.png) | ![preview 4](0/preview_4.png) | ![preview 5](0/preview_5.png) | ![preview 6](0/preview_6.png) | ![preview 7](0/preview_7.png) | ![preview 8](0/preview_8.png) | | 1 | 268 | [Download](1/dataset.zip) | ![preview 1](1/preview_1.png) | ![preview 2](1/preview_2.png) | ![preview 3](1/preview_3.png) | ![preview 4](1/preview_4.png) | ![preview 5](1/preview_5.png) | ![preview 6](1/preview_6.png) | ![preview 7](1/preview_7.png) | ![preview 8](1/preview_8.png) | | 2 | 32 | [Download](2/dataset.zip) | ![preview 1](2/preview_1.png) | ![preview 2](2/preview_2.png) | ![preview 3](2/preview_3.png) | ![preview 4](2/preview_4.png) | ![preview 5](2/preview_5.png) | ![preview 6](2/preview_6.png) | ![preview 7](2/preview_7.png) | ![preview 8](2/preview_8.png) | | 3 | 1069 | [Download](3/dataset.zip) | ![preview 1](3/preview_1.png) | ![preview 
2](3/preview_2.png) | ![preview 3](3/preview_3.png) | ![preview 4](3/preview_4.png) | ![preview 5](3/preview_5.png) | ![preview 6](3/preview_6.png) | ![preview 7](3/preview_7.png) | ![preview 8](3/preview_8.png) | | 4 | 210 | [Download](4/dataset.zip) | ![preview 1](4/preview_1.png) | ![preview 2](4/preview_2.png) | ![preview 3](4/preview_3.png) | ![preview 4](4/preview_4.png) | ![preview 5](4/preview_5.png) | ![preview 6](4/preview_6.png) | ![preview 7](4/preview_7.png) | ![preview 8](4/preview_8.png) | | 5 | 181 | [Download](5/dataset.zip) | ![preview 1](5/preview_1.png) | ![preview 2](5/preview_2.png) | ![preview 3](5/preview_3.png) | ![preview 4](5/preview_4.png) | ![preview 5](5/preview_5.png) | ![preview 6](5/preview_6.png) | ![preview 7](5/preview_7.png) | ![preview 8](5/preview_8.png) | | 6 | 464 | [Download](6/dataset.zip) | ![preview 1](6/preview_1.png) | ![preview 2](6/preview_2.png) | ![preview 3](6/preview_3.png) | ![preview 4](6/preview_4.png) | ![preview 5](6/preview_5.png) | ![preview 6](6/preview_6.png) | ![preview 7](6/preview_7.png) | ![preview 8](6/preview_8.png) | | 7 | 131 | [Download](7/dataset.zip) | ![preview 1](7/preview_1.png) | ![preview 2](7/preview_2.png) | ![preview 3](7/preview_3.png) | ![preview 4](7/preview_4.png) | ![preview 5](7/preview_5.png) | ![preview 6](7/preview_6.png) | ![preview 7](7/preview_7.png) | ![preview 8](7/preview_8.png) | | 8 | 118 | [Download](8/dataset.zip) | ![preview 1](8/preview_1.png) | ![preview 2](8/preview_2.png) | ![preview 3](8/preview_3.png) | ![preview 4](8/preview_4.png) | ![preview 5](8/preview_5.png) | ![preview 6](8/preview_6.png) | ![preview 7](8/preview_7.png) | ![preview 8](8/preview_8.png) | | 9 | 115 | [Download](9/dataset.zip) | ![preview 1](9/preview_1.png) | ![preview 2](9/preview_2.png) | ![preview 3](9/preview_3.png) | ![preview 4](9/preview_4.png) | ![preview 5](9/preview_5.png) | ![preview 6](9/preview_6.png) | ![preview 7](9/preview_7.png) | ![preview 8](9/preview_8.png) | | 10 | 
148 | [Download](10/dataset.zip) | ![preview 1](10/preview_1.png) | ![preview 2](10/preview_2.png) | ![preview 3](10/preview_3.png) | ![preview 4](10/preview_4.png) | ![preview 5](10/preview_5.png) | ![preview 6](10/preview_6.png) | ![preview 7](10/preview_7.png) | ![preview 8](10/preview_8.png) | | 11 | 334 | [Download](11/dataset.zip) | ![preview 1](11/preview_1.png) | ![preview 2](11/preview_2.png) | ![preview 3](11/preview_3.png) | ![preview 4](11/preview_4.png) | ![preview 5](11/preview_5.png) | ![preview 6](11/preview_6.png) | ![preview 7](11/preview_7.png) | ![preview 8](11/preview_8.png) | | 12 | 97 | [Download](12/dataset.zip) | ![preview 1](12/preview_1.png) | ![preview 2](12/preview_2.png) | ![preview 3](12/preview_3.png) | ![preview 4](12/preview_4.png) | ![preview 5](12/preview_5.png) | ![preview 6](12/preview_6.png) | ![preview 7](12/preview_7.png) | ![preview 8](12/preview_8.png) | | 13 | 171 | [Download](13/dataset.zip) | ![preview 1](13/preview_1.png) | ![preview 2](13/preview_2.png) | ![preview 3](13/preview_3.png) | ![preview 4](13/preview_4.png) | ![preview 5](13/preview_5.png) | ![preview 6](13/preview_6.png) | ![preview 7](13/preview_7.png) | ![preview 8](13/preview_8.png) | | 14 | 2819 | [Download](14/dataset.zip) | ![preview 1](14/preview_1.png) | ![preview 2](14/preview_2.png) | ![preview 3](14/preview_3.png) | ![preview 4](14/preview_4.png) | ![preview 5](14/preview_5.png) | ![preview 6](14/preview_6.png) | ![preview 7](14/preview_7.png) | ![preview 8](14/preview_8.png) | | 15 | 1531 | [Download](15/dataset.zip) | ![preview 1](15/preview_1.png) | ![preview 2](15/preview_2.png) | ![preview 3](15/preview_3.png) | ![preview 4](15/preview_4.png) | ![preview 5](15/preview_5.png) | ![preview 6](15/preview_6.png) | ![preview 7](15/preview_7.png) | ![preview 8](15/preview_8.png) | | 16 | 445 | [Download](16/dataset.zip) | ![preview 1](16/preview_1.png) | ![preview 2](16/preview_2.png) | ![preview 3](16/preview_3.png) | ![preview 
4](16/preview_4.png) | ![preview 5](16/preview_5.png) | ![preview 6](16/preview_6.png) | ![preview 7](16/preview_7.png) | ![preview 8](16/preview_8.png) | | 17 | 876 | [Download](17/dataset.zip) | ![preview 1](17/preview_1.png) | ![preview 2](17/preview_2.png) | ![preview 3](17/preview_3.png) | ![preview 4](17/preview_4.png) | ![preview 5](17/preview_5.png) | ![preview 6](17/preview_6.png) | ![preview 7](17/preview_7.png) | ![preview 8](17/preview_8.png) | | 18 | 329 | [Download](18/dataset.zip) | ![preview 1](18/preview_1.png) | ![preview 2](18/preview_2.png) | ![preview 3](18/preview_3.png) | ![preview 4](18/preview_4.png) | ![preview 5](18/preview_5.png) | ![preview 6](18/preview_6.png) | ![preview 7](18/preview_7.png) | ![preview 8](18/preview_8.png) | | 19 | 60 | [Download](19/dataset.zip) | ![preview 1](19/preview_1.png) | ![preview 2](19/preview_2.png) | ![preview 3](19/preview_3.png) | ![preview 4](19/preview_4.png) | ![preview 5](19/preview_5.png) | ![preview 6](19/preview_6.png) | ![preview 7](19/preview_7.png) | ![preview 8](19/preview_8.png) | | 20 | 145 | [Download](20/dataset.zip) | ![preview 1](20/preview_1.png) | ![preview 2](20/preview_2.png) | ![preview 3](20/preview_3.png) | ![preview 4](20/preview_4.png) | ![preview 5](20/preview_5.png) | ![preview 6](20/preview_6.png) | ![preview 7](20/preview_7.png) | ![preview 8](20/preview_8.png) | | 21 | 213 | [Download](21/dataset.zip) | ![preview 1](21/preview_1.png) | ![preview 2](21/preview_2.png) | ![preview 3](21/preview_3.png) | ![preview 4](21/preview_4.png) | ![preview 5](21/preview_5.png) | ![preview 6](21/preview_6.png) | ![preview 7](21/preview_7.png) | ![preview 8](21/preview_8.png) | | 22 | 727 | [Download](22/dataset.zip) | ![preview 1](22/preview_1.png) | ![preview 2](22/preview_2.png) | ![preview 3](22/preview_3.png) | ![preview 4](22/preview_4.png) | ![preview 5](22/preview_5.png) | ![preview 6](22/preview_6.png) | ![preview 7](22/preview_7.png) | ![preview 8](22/preview_8.png) | | 23 | 
284 | [Download](23/dataset.zip) | ![preview 1](23/preview_1.png) | ![preview 2](23/preview_2.png) | ![preview 3](23/preview_3.png) | ![preview 4](23/preview_4.png) | ![preview 5](23/preview_5.png) | ![preview 6](23/preview_6.png) | ![preview 7](23/preview_7.png) | ![preview 8](23/preview_8.png) | | 24 | 93 | [Download](24/dataset.zip) | ![preview 1](24/preview_1.png) | ![preview 2](24/preview_2.png) | ![preview 3](24/preview_3.png) | ![preview 4](24/preview_4.png) | ![preview 5](24/preview_5.png) | ![preview 6](24/preview_6.png) | ![preview 7](24/preview_7.png) | ![preview 8](24/preview_8.png) | | 25 | 72 | [Download](25/dataset.zip) | ![preview 1](25/preview_1.png) | ![preview 2](25/preview_2.png) | ![preview 3](25/preview_3.png) | ![preview 4](25/preview_4.png) | ![preview 5](25/preview_5.png) | ![preview 6](25/preview_6.png) | ![preview 7](25/preview_7.png) | ![preview 8](25/preview_8.png) | | 26 | 186 | [Download](26/dataset.zip) | ![preview 1](26/preview_1.png) | ![preview 2](26/preview_2.png) | ![preview 3](26/preview_3.png) | ![preview 4](26/preview_4.png) | ![preview 5](26/preview_5.png) | ![preview 6](26/preview_6.png) | ![preview 7](26/preview_7.png) | ![preview 8](26/preview_8.png) | | 27 | 76 | [Download](27/dataset.zip) | ![preview 1](27/preview_1.png) | ![preview 2](27/preview_2.png) | ![preview 3](27/preview_3.png) | ![preview 4](27/preview_4.png) | ![preview 5](27/preview_5.png) | ![preview 6](27/preview_6.png) | ![preview 7](27/preview_7.png) | ![preview 8](27/preview_8.png) | | 28 | 62 | [Download](28/dataset.zip) | ![preview 1](28/preview_1.png) | ![preview 2](28/preview_2.png) | ![preview 3](28/preview_3.png) | ![preview 4](28/preview_4.png) | ![preview 5](28/preview_5.png) | ![preview 6](28/preview_6.png) | ![preview 7](28/preview_7.png) | ![preview 8](28/preview_8.png) | | 29 | 55 | [Download](29/dataset.zip) | ![preview 1](29/preview_1.png) | ![preview 2](29/preview_2.png) | ![preview 3](29/preview_3.png) | ![preview 4](29/preview_4.png) | 
![preview 5](29/preview_5.png) | ![preview 6](29/preview_6.png) | ![preview 7](29/preview_7.png) | ![preview 8](29/preview_8.png) | | 30 | 294 | [Download](30/dataset.zip) | ![preview 1](30/preview_1.png) | ![preview 2](30/preview_2.png) | ![preview 3](30/preview_3.png) | ![preview 4](30/preview_4.png) | ![preview 5](30/preview_5.png) | ![preview 6](30/preview_6.png) | ![preview 7](30/preview_7.png) | ![preview 8](30/preview_8.png) | | 31 | 45 | [Download](31/dataset.zip) | ![preview 1](31/preview_1.png) | ![preview 2](31/preview_2.png) | ![preview 3](31/preview_3.png) | ![preview 4](31/preview_4.png) | ![preview 5](31/preview_5.png) | ![preview 6](31/preview_6.png) | ![preview 7](31/preview_7.png) | ![preview 8](31/preview_8.png) | | 32 | 67 | [Download](32/dataset.zip) | ![preview 1](32/preview_1.png) | ![preview 2](32/preview_2.png) | ![preview 3](32/preview_3.png) | ![preview 4](32/preview_4.png) | ![preview 5](32/preview_5.png) | ![preview 6](32/preview_6.png) | ![preview 7](32/preview_7.png) | ![preview 8](32/preview_8.png) | | 33 | 42 | [Download](33/dataset.zip) | ![preview 1](33/preview_1.png) | ![preview 2](33/preview_2.png) | ![preview 3](33/preview_3.png) | ![preview 4](33/preview_4.png) | ![preview 5](33/preview_5.png) | ![preview 6](33/preview_6.png) | ![preview 7](33/preview_7.png) | ![preview 8](33/preview_8.png) | | 34 | 119 | [Download](34/dataset.zip) | ![preview 1](34/preview_1.png) | ![preview 2](34/preview_2.png) | ![preview 3](34/preview_3.png) | ![preview 4](34/preview_4.png) | ![preview 5](34/preview_5.png) | ![preview 6](34/preview_6.png) | ![preview 7](34/preview_7.png) | ![preview 8](34/preview_8.png) | | 35 | 49 | [Download](35/dataset.zip) | ![preview 1](35/preview_1.png) | ![preview 2](35/preview_2.png) | ![preview 3](35/preview_3.png) | ![preview 4](35/preview_4.png) | ![preview 5](35/preview_5.png) | ![preview 6](35/preview_6.png) | ![preview 7](35/preview_7.png) | ![preview 8](35/preview_8.png) | | 36 | 52 | 
[Download](36/dataset.zip) | ![preview 1](36/preview_1.png) | ![preview 2](36/preview_2.png) | ![preview 3](36/preview_3.png) | ![preview 4](36/preview_4.png) | ![preview 5](36/preview_5.png) | ![preview 6](36/preview_6.png) | ![preview 7](36/preview_7.png) | ![preview 8](36/preview_8.png) | | 37 | 45 | [Download](37/dataset.zip) | ![preview 1](37/preview_1.png) | ![preview 2](37/preview_2.png) | ![preview 3](37/preview_3.png) | ![preview 4](37/preview_4.png) | ![preview 5](37/preview_5.png) | ![preview 6](37/preview_6.png) | ![preview 7](37/preview_7.png) | ![preview 8](37/preview_8.png) | | 38 | 77 | [Download](38/dataset.zip) | ![preview 1](38/preview_1.png) | ![preview 2](38/preview_2.png) | ![preview 3](38/preview_3.png) | ![preview 4](38/preview_4.png) | ![preview 5](38/preview_5.png) | ![preview 6](38/preview_6.png) | ![preview 7](38/preview_7.png) | ![preview 8](38/preview_8.png) | | 39 | 37 | [Download](39/dataset.zip) | ![preview 1](39/preview_1.png) | ![preview 2](39/preview_2.png) | ![preview 3](39/preview_3.png) | ![preview 4](39/preview_4.png) | ![preview 5](39/preview_5.png) | ![preview 6](39/preview_6.png) | ![preview 7](39/preview_7.png) | ![preview 8](39/preview_8.png) | | 40 | 57 | [Download](40/dataset.zip) | ![preview 1](40/preview_1.png) | ![preview 2](40/preview_2.png) | ![preview 3](40/preview_3.png) | ![preview 4](40/preview_4.png) | ![preview 5](40/preview_5.png) | ![preview 6](40/preview_6.png) | ![preview 7](40/preview_7.png) | ![preview 8](40/preview_8.png) | | 41 | 51 | [Download](41/dataset.zip) | ![preview 1](41/preview_1.png) | ![preview 2](41/preview_2.png) | ![preview 3](41/preview_3.png) | ![preview 4](41/preview_4.png) | ![preview 5](41/preview_5.png) | ![preview 6](41/preview_6.png) | ![preview 7](41/preview_7.png) | ![preview 8](41/preview_8.png) | | 42 | 80 | [Download](42/dataset.zip) | ![preview 1](42/preview_1.png) | ![preview 2](42/preview_2.png) | ![preview 3](42/preview_3.png) | ![preview 4](42/preview_4.png) | 
![preview 5](42/preview_5.png) | ![preview 6](42/preview_6.png) | ![preview 7](42/preview_7.png) | ![preview 8](42/preview_8.png) | | 43 | 128 | [Download](43/dataset.zip) | ![preview 1](43/preview_1.png) | ![preview 2](43/preview_2.png) | ![preview 3](43/preview_3.png) | ![preview 4](43/preview_4.png) | ![preview 5](43/preview_5.png) | ![preview 6](43/preview_6.png) | ![preview 7](43/preview_7.png) | ![preview 8](43/preview_8.png) | | 44 | 106 | [Download](44/dataset.zip) | ![preview 1](44/preview_1.png) | ![preview 2](44/preview_2.png) | ![preview 3](44/preview_3.png) | ![preview 4](44/preview_4.png) | ![preview 5](44/preview_5.png) | ![preview 6](44/preview_6.png) | ![preview 7](44/preview_7.png) | ![preview 8](44/preview_8.png) | | 45 | 42 | [Download](45/dataset.zip) | ![preview 1](45/preview_1.png) | ![preview 2](45/preview_2.png) | ![preview 3](45/preview_3.png) | ![preview 4](45/preview_4.png) | ![preview 5](45/preview_5.png) | ![preview 6](45/preview_6.png) | ![preview 7](45/preview_7.png) | ![preview 8](45/preview_8.png) | | 46 | 72 | [Download](46/dataset.zip) | ![preview 1](46/preview_1.png) | ![preview 2](46/preview_2.png) | ![preview 3](46/preview_3.png) | ![preview 4](46/preview_4.png) | ![preview 5](46/preview_5.png) | ![preview 6](46/preview_6.png) | ![preview 7](46/preview_7.png) | ![preview 8](46/preview_8.png) | | 47 | 22 | [Download](47/dataset.zip) | ![preview 1](47/preview_1.png) | ![preview 2](47/preview_2.png) | ![preview 3](47/preview_3.png) | ![preview 4](47/preview_4.png) | ![preview 5](47/preview_5.png) | ![preview 6](47/preview_6.png) | ![preview 7](47/preview_7.png) | ![preview 8](47/preview_8.png) | | 48 | 57 | [Download](48/dataset.zip) | ![preview 1](48/preview_1.png) | ![preview 2](48/preview_2.png) | ![preview 3](48/preview_3.png) | ![preview 4](48/preview_4.png) | ![preview 5](48/preview_5.png) | ![preview 6](48/preview_6.png) | ![preview 7](48/preview_7.png) | ![preview 8](48/preview_8.png) | | 49 | 43 | 
[Download](49/dataset.zip) | ![preview 1](49/preview_1.png) | ![preview 2](49/preview_2.png) | ![preview 3](49/preview_3.png) | ![preview 4](49/preview_4.png) | ![preview 5](49/preview_5.png) | ![preview 6](49/preview_6.png) | ![preview 7](49/preview_7.png) | ![preview 8](49/preview_8.png) | | 50 | 73 | [Download](50/dataset.zip) | ![preview 1](50/preview_1.png) | ![preview 2](50/preview_2.png) | ![preview 3](50/preview_3.png) | ![preview 4](50/preview_4.png) | ![preview 5](50/preview_5.png) | ![preview 6](50/preview_6.png) | ![preview 7](50/preview_7.png) | ![preview 8](50/preview_8.png) | | 51 | 67 | [Download](51/dataset.zip) | ![preview 1](51/preview_1.png) | ![preview 2](51/preview_2.png) | ![preview 3](51/preview_3.png) | ![preview 4](51/preview_4.png) | ![preview 5](51/preview_5.png) | ![preview 6](51/preview_6.png) | ![preview 7](51/preview_7.png) | ![preview 8](51/preview_8.png) | | 52 | 43 | [Download](52/dataset.zip) | ![preview 1](52/preview_1.png) | ![preview 2](52/preview_2.png) | ![preview 3](52/preview_3.png) | ![preview 4](52/preview_4.png) | ![preview 5](52/preview_5.png) | ![preview 6](52/preview_6.png) | ![preview 7](52/preview_7.png) | ![preview 8](52/preview_8.png) | | 53 | 690 | [Download](53/dataset.zip) | ![preview 1](53/preview_1.png) | ![preview 2](53/preview_2.png) | ![preview 3](53/preview_3.png) | ![preview 4](53/preview_4.png) | ![preview 5](53/preview_5.png) | ![preview 6](53/preview_6.png) | ![preview 7](53/preview_7.png) | ![preview 8](53/preview_8.png) | | 54 | 34 | [Download](54/dataset.zip) | ![preview 1](54/preview_1.png) | ![preview 2](54/preview_2.png) | ![preview 3](54/preview_3.png) | ![preview 4](54/preview_4.png) | ![preview 5](54/preview_5.png) | ![preview 6](54/preview_6.png) | ![preview 7](54/preview_7.png) | ![preview 8](54/preview_8.png) | | 55 | 33 | [Download](55/dataset.zip) | ![preview 1](55/preview_1.png) | ![preview 2](55/preview_2.png) | ![preview 3](55/preview_3.png) | ![preview 4](55/preview_4.png) | 
![preview 5](55/preview_5.png) | ![preview 6](55/preview_6.png) | ![preview 7](55/preview_7.png) | ![preview 8](55/preview_8.png) | | 56 | 55 | [Download](56/dataset.zip) | ![preview 1](56/preview_1.png) | ![preview 2](56/preview_2.png) | ![preview 3](56/preview_3.png) | ![preview 4](56/preview_4.png) | ![preview 5](56/preview_5.png) | ![preview 6](56/preview_6.png) | ![preview 7](56/preview_7.png) | ![preview 8](56/preview_8.png) | | 57 | 4546 | [Download](57/dataset.zip) | ![preview 1](57/preview_1.png) | ![preview 2](57/preview_2.png) | ![preview 3](57/preview_3.png) | ![preview 4](57/preview_4.png) | ![preview 5](57/preview_5.png) | ![preview 6](57/preview_6.png) | ![preview 7](57/preview_7.png) | ![preview 8](57/preview_8.png) | | 58 | 145 | [Download](58/dataset.zip) | ![preview 1](58/preview_1.png) | ![preview 2](58/preview_2.png) | ![preview 3](58/preview_3.png) | ![preview 4](58/preview_4.png) | ![preview 5](58/preview_5.png) | ![preview 6](58/preview_6.png) | ![preview 7](58/preview_7.png) | ![preview 8](58/preview_8.png) | | 59 | 170 | [Download](59/dataset.zip) | ![preview 1](59/preview_1.png) | ![preview 2](59/preview_2.png) | ![preview 3](59/preview_3.png) | ![preview 4](59/preview_4.png) | ![preview 5](59/preview_5.png) | ![preview 6](59/preview_6.png) | ![preview 7](59/preview_7.png) | ![preview 8](59/preview_8.png) | | 60 | 28 | [Download](60/dataset.zip) | ![preview 1](60/preview_1.png) | ![preview 2](60/preview_2.png) | ![preview 3](60/preview_3.png) | ![preview 4](60/preview_4.png) | ![preview 5](60/preview_5.png) | ![preview 6](60/preview_6.png) | ![preview 7](60/preview_7.png) | ![preview 8](60/preview_8.png) | | 61 | 63 | [Download](61/dataset.zip) | ![preview 1](61/preview_1.png) | ![preview 2](61/preview_2.png) | ![preview 3](61/preview_3.png) | ![preview 4](61/preview_4.png) | ![preview 5](61/preview_5.png) | ![preview 6](61/preview_6.png) | ![preview 7](61/preview_7.png) | ![preview 8](61/preview_8.png) | | 62 | 41 | 
[Download](62/dataset.zip) | ![preview 1](62/preview_1.png) | ![preview 2](62/preview_2.png) | ![preview 3](62/preview_3.png) | ![preview 4](62/preview_4.png) | ![preview 5](62/preview_5.png) | ![preview 6](62/preview_6.png) | ![preview 7](62/preview_7.png) | ![preview 8](62/preview_8.png) | | 63 | 49 | [Download](63/dataset.zip) | ![preview 1](63/preview_1.png) | ![preview 2](63/preview_2.png) | ![preview 3](63/preview_3.png) | ![preview 4](63/preview_4.png) | ![preview 5](63/preview_5.png) | ![preview 6](63/preview_6.png) | ![preview 7](63/preview_7.png) | ![preview 8](63/preview_8.png) | | 64 | 36 | [Download](64/dataset.zip) | ![preview 1](64/preview_1.png) | ![preview 2](64/preview_2.png) | ![preview 3](64/preview_3.png) | ![preview 4](64/preview_4.png) | ![preview 5](64/preview_5.png) | ![preview 6](64/preview_6.png) | ![preview 7](64/preview_7.png) | ![preview 8](64/preview_8.png) | | 65 | 41 | [Download](65/dataset.zip) | ![preview 1](65/preview_1.png) | ![preview 2](65/preview_2.png) | ![preview 3](65/preview_3.png) | ![preview 4](65/preview_4.png) | ![preview 5](65/preview_5.png) | ![preview 6](65/preview_6.png) | ![preview 7](65/preview_7.png) | ![preview 8](65/preview_8.png) | | 66 | 25 | [Download](66/dataset.zip) | ![preview 1](66/preview_1.png) | ![preview 2](66/preview_2.png) | ![preview 3](66/preview_3.png) | ![preview 4](66/preview_4.png) | ![preview 5](66/preview_5.png) | ![preview 6](66/preview_6.png) | ![preview 7](66/preview_7.png) | ![preview 8](66/preview_8.png) | | 67 | 62 | [Download](67/dataset.zip) | ![preview 1](67/preview_1.png) | ![preview 2](67/preview_2.png) | ![preview 3](67/preview_3.png) | ![preview 4](67/preview_4.png) | ![preview 5](67/preview_5.png) | ![preview 6](67/preview_6.png) | ![preview 7](67/preview_7.png) | ![preview 8](67/preview_8.png) | | 68 | 39 | [Download](68/dataset.zip) | ![preview 1](68/preview_1.png) | ![preview 2](68/preview_2.png) | ![preview 3](68/preview_3.png) | ![preview 4](68/preview_4.png) | 
![preview 5](68/preview_5.png) | ![preview 6](68/preview_6.png) | ![preview 7](68/preview_7.png) | ![preview 8](68/preview_8.png) | | 69 | 40 | [Download](69/dataset.zip) | ![preview 1](69/preview_1.png) | ![preview 2](69/preview_2.png) | ![preview 3](69/preview_3.png) | ![preview 4](69/preview_4.png) | ![preview 5](69/preview_5.png) | ![preview 6](69/preview_6.png) | ![preview 7](69/preview_7.png) | ![preview 8](69/preview_8.png) | | 70 | 28 | [Download](70/dataset.zip) | ![preview 1](70/preview_1.png) | ![preview 2](70/preview_2.png) | ![preview 3](70/preview_3.png) | ![preview 4](70/preview_4.png) | ![preview 5](70/preview_5.png) | ![preview 6](70/preview_6.png) | ![preview 7](70/preview_7.png) | ![preview 8](70/preview_8.png) | | 71 | 23 | [Download](71/dataset.zip) | ![preview 1](71/preview_1.png) | ![preview 2](71/preview_2.png) | ![preview 3](71/preview_3.png) | ![preview 4](71/preview_4.png) | ![preview 5](71/preview_5.png) | ![preview 6](71/preview_6.png) | ![preview 7](71/preview_7.png) | ![preview 8](71/preview_8.png) | | 72 | 96 | [Download](72/dataset.zip) | ![preview 1](72/preview_1.png) | ![preview 2](72/preview_2.png) | ![preview 3](72/preview_3.png) | ![preview 4](72/preview_4.png) | ![preview 5](72/preview_5.png) | ![preview 6](72/preview_6.png) | ![preview 7](72/preview_7.png) | ![preview 8](72/preview_8.png) | | 73 | 93 | [Download](73/dataset.zip) | ![preview 1](73/preview_1.png) | ![preview 2](73/preview_2.png) | ![preview 3](73/preview_3.png) | ![preview 4](73/preview_4.png) | ![preview 5](73/preview_5.png) | ![preview 6](73/preview_6.png) | ![preview 7](73/preview_7.png) | ![preview 8](73/preview_8.png) | | 74 | 61 | [Download](74/dataset.zip) | ![preview 1](74/preview_1.png) | ![preview 2](74/preview_2.png) | ![preview 3](74/preview_3.png) | ![preview 4](74/preview_4.png) | ![preview 5](74/preview_5.png) | ![preview 6](74/preview_6.png) | ![preview 7](74/preview_7.png) | ![preview 8](74/preview_8.png) | | 75 | 29 | 
[Download](75/dataset.zip) | ![preview 1](75/preview_1.png) | ![preview 2](75/preview_2.png) | ![preview 3](75/preview_3.png) | ![preview 4](75/preview_4.png) | ![preview 5](75/preview_5.png) | ![preview 6](75/preview_6.png) | ![preview 7](75/preview_7.png) | ![preview 8](75/preview_8.png) | | 76 | 69 | [Download](76/dataset.zip) | ![preview 1](76/preview_1.png) | ![preview 2](76/preview_2.png) | ![preview 3](76/preview_3.png) | ![preview 4](76/preview_4.png) | ![preview 5](76/preview_5.png) | ![preview 6](76/preview_6.png) | ![preview 7](76/preview_7.png) | ![preview 8](76/preview_8.png) | | 77 | 36 | [Download](77/dataset.zip) | ![preview 1](77/preview_1.png) | ![preview 2](77/preview_2.png) | ![preview 3](77/preview_3.png) | ![preview 4](77/preview_4.png) | ![preview 5](77/preview_5.png) | ![preview 6](77/preview_6.png) | ![preview 7](77/preview_7.png) | ![preview 8](77/preview_8.png) | | 78 | 55 | [Download](78/dataset.zip) | ![preview 1](78/preview_1.png) | ![preview 2](78/preview_2.png) | ![preview 3](78/preview_3.png) | ![preview 4](78/preview_4.png) | ![preview 5](78/preview_5.png) | ![preview 6](78/preview_6.png) | ![preview 7](78/preview_7.png) | ![preview 8](78/preview_8.png) | | 79 | 43 | [Download](79/dataset.zip) | ![preview 1](79/preview_1.png) | ![preview 2](79/preview_2.png) | ![preview 3](79/preview_3.png) | ![preview 4](79/preview_4.png) | ![preview 5](79/preview_5.png) | ![preview 6](79/preview_6.png) | ![preview 7](79/preview_7.png) | ![preview 8](79/preview_8.png) | | 80 | 46 | [Download](80/dataset.zip) | ![preview 1](80/preview_1.png) | ![preview 2](80/preview_2.png) | ![preview 3](80/preview_3.png) | ![preview 4](80/preview_4.png) | ![preview 5](80/preview_5.png) | ![preview 6](80/preview_6.png) | ![preview 7](80/preview_7.png) | ![preview 8](80/preview_8.png) | | 81 | 32 | [Download](81/dataset.zip) | ![preview 1](81/preview_1.png) | ![preview 2](81/preview_2.png) | ![preview 3](81/preview_3.png) | ![preview 4](81/preview_4.png) | 
![preview 5](81/preview_5.png) | ![preview 6](81/preview_6.png) | ![preview 7](81/preview_7.png) | ![preview 8](81/preview_8.png) | | 82 | 48 | [Download](82/dataset.zip) | ![preview 1](82/preview_1.png) | ![preview 2](82/preview_2.png) | ![preview 3](82/preview_3.png) | ![preview 4](82/preview_4.png) | ![preview 5](82/preview_5.png) | ![preview 6](82/preview_6.png) | ![preview 7](82/preview_7.png) | ![preview 8](82/preview_8.png) | | 83 | 77 | [Download](83/dataset.zip) | ![preview 1](83/preview_1.png) | ![preview 2](83/preview_2.png) | ![preview 3](83/preview_3.png) | ![preview 4](83/preview_4.png) | ![preview 5](83/preview_5.png) | ![preview 6](83/preview_6.png) | ![preview 7](83/preview_7.png) | ![preview 8](83/preview_8.png) | | 84 | 35 | [Download](84/dataset.zip) | ![preview 1](84/preview_1.png) | ![preview 2](84/preview_2.png) | ![preview 3](84/preview_3.png) | ![preview 4](84/preview_4.png) | ![preview 5](84/preview_5.png) | ![preview 6](84/preview_6.png) | ![preview 7](84/preview_7.png) | ![preview 8](84/preview_8.png) | | 85 | 44 | [Download](85/dataset.zip) | ![preview 1](85/preview_1.png) | ![preview 2](85/preview_2.png) | ![preview 3](85/preview_3.png) | ![preview 4](85/preview_4.png) | ![preview 5](85/preview_5.png) | ![preview 6](85/preview_6.png) | ![preview 7](85/preview_7.png) | ![preview 8](85/preview_8.png) | | 86 | 33 | [Download](86/dataset.zip) | ![preview 1](86/preview_1.png) | ![preview 2](86/preview_2.png) | ![preview 3](86/preview_3.png) | ![preview 4](86/preview_4.png) | ![preview 5](86/preview_5.png) | ![preview 6](86/preview_6.png) | ![preview 7](86/preview_7.png) | ![preview 8](86/preview_8.png) | | 87 | 38 | [Download](87/dataset.zip) | ![preview 1](87/preview_1.png) | ![preview 2](87/preview_2.png) | ![preview 3](87/preview_3.png) | ![preview 4](87/preview_4.png) | ![preview 5](87/preview_5.png) | ![preview 6](87/preview_6.png) | ![preview 7](87/preview_7.png) | ![preview 8](87/preview_8.png) | | 88 | 58 | 
[Download](88/dataset.zip) | ![preview 1](88/preview_1.png) | ![preview 2](88/preview_2.png) | ![preview 3](88/preview_3.png) | ![preview 4](88/preview_4.png) | ![preview 5](88/preview_5.png) | ![preview 6](88/preview_6.png) | ![preview 7](88/preview_7.png) | ![preview 8](88/preview_8.png) | | 89 | 112 | [Download](89/dataset.zip) | ![preview 1](89/preview_1.png) | ![preview 2](89/preview_2.png) | ![preview 3](89/preview_3.png) | ![preview 4](89/preview_4.png) | ![preview 5](89/preview_5.png) | ![preview 6](89/preview_6.png) | ![preview 7](89/preview_7.png) | ![preview 8](89/preview_8.png) | | 90 | 33 | [Download](90/dataset.zip) | ![preview 1](90/preview_1.png) | ![preview 2](90/preview_2.png) | ![preview 3](90/preview_3.png) | ![preview 4](90/preview_4.png) | ![preview 5](90/preview_5.png) | ![preview 6](90/preview_6.png) | ![preview 7](90/preview_7.png) | ![preview 8](90/preview_8.png) | | 91 | 73 | [Download](91/dataset.zip) | ![preview 1](91/preview_1.png) | ![preview 2](91/preview_2.png) | ![preview 3](91/preview_3.png) | ![preview 4](91/preview_4.png) | ![preview 5](91/preview_5.png) | ![preview 6](91/preview_6.png) | ![preview 7](91/preview_7.png) | ![preview 8](91/preview_8.png) | | 92 | 254 | [Download](92/dataset.zip) | ![preview 1](92/preview_1.png) | ![preview 2](92/preview_2.png) | ![preview 3](92/preview_3.png) | ![preview 4](92/preview_4.png) | ![preview 5](92/preview_5.png) | ![preview 6](92/preview_6.png) | ![preview 7](92/preview_7.png) | ![preview 8](92/preview_8.png) | | 93 | 68 | [Download](93/dataset.zip) | ![preview 1](93/preview_1.png) | ![preview 2](93/preview_2.png) | ![preview 3](93/preview_3.png) | ![preview 4](93/preview_4.png) | ![preview 5](93/preview_5.png) | ![preview 6](93/preview_6.png) | ![preview 7](93/preview_7.png) | ![preview 8](93/preview_8.png) | | 94 | 27 | [Download](94/dataset.zip) | ![preview 1](94/preview_1.png) | ![preview 2](94/preview_2.png) | ![preview 3](94/preview_3.png) | ![preview 4](94/preview_4.png) | 
![preview 5](94/preview_5.png) | ![preview 6](94/preview_6.png) | ![preview 7](94/preview_7.png) | ![preview 8](94/preview_8.png) | | 95 | 44 | [Download](95/dataset.zip) | ![preview 1](95/preview_1.png) | ![preview 2](95/preview_2.png) | ![preview 3](95/preview_3.png) | ![preview 4](95/preview_4.png) | ![preview 5](95/preview_5.png) | ![preview 6](95/preview_6.png) | ![preview 7](95/preview_7.png) | ![preview 8](95/preview_8.png) | | 96 | 88 | [Download](96/dataset.zip) | ![preview 1](96/preview_1.png) | ![preview 2](96/preview_2.png) | ![preview 3](96/preview_3.png) | ![preview 4](96/preview_4.png) | ![preview 5](96/preview_5.png) | ![preview 6](96/preview_6.png) | ![preview 7](96/preview_7.png) | ![preview 8](96/preview_8.png) | | 97 | 45 | [Download](97/dataset.zip) | ![preview 1](97/preview_1.png) | ![preview 2](97/preview_2.png) | ![preview 3](97/preview_3.png) | ![preview 4](97/preview_4.png) | ![preview 5](97/preview_5.png) | ![preview 6](97/preview_6.png) | ![preview 7](97/preview_7.png) | ![preview 8](97/preview_8.png) | | 98 | 16 | [Download](98/dataset.zip) | ![preview 1](98/preview_1.png) | ![preview 2](98/preview_2.png) | ![preview 3](98/preview_3.png) | ![preview 4](98/preview_4.png) | ![preview 5](98/preview_5.png) | ![preview 6](98/preview_6.png) | ![preview 7](98/preview_7.png) | ![preview 8](98/preview_8.png) | | 99 | 266 | [Download](99/dataset.zip) | ![preview 1](99/preview_1.png) | ![preview 2](99/preview_2.png) | ![preview 3](99/preview_3.png) | ![preview 4](99/preview_4.png) | ![preview 5](99/preview_5.png) | ![preview 6](99/preview_6.png) | ![preview 7](99/preview_7.png) | ![preview 8](99/preview_8.png) | | 100 | 458 | [Download](100/dataset.zip) | ![preview 1](100/preview_1.png) | ![preview 2](100/preview_2.png) | ![preview 3](100/preview_3.png) | ![preview 4](100/preview_4.png) | ![preview 5](100/preview_5.png) | ![preview 6](100/preview_6.png) | ![preview 7](100/preview_7.png) | ![preview 8](100/preview_8.png) | | 101 | 17 | 
[Download](101/dataset.zip) | ![preview 1](101/preview_1.png) | ![preview 2](101/preview_2.png) | ![preview 3](101/preview_3.png) | ![preview 4](101/preview_4.png) | ![preview 5](101/preview_5.png) | ![preview 6](101/preview_6.png) | ![preview 7](101/preview_7.png) | ![preview 8](101/preview_8.png) | | 102 | 67 | [Download](102/dataset.zip) | ![preview 1](102/preview_1.png) | ![preview 2](102/preview_2.png) | ![preview 3](102/preview_3.png) | ![preview 4](102/preview_4.png) | ![preview 5](102/preview_5.png) | ![preview 6](102/preview_6.png) | ![preview 7](102/preview_7.png) | ![preview 8](102/preview_8.png) | | 103 | 47 | [Download](103/dataset.zip) | ![preview 1](103/preview_1.png) | ![preview 2](103/preview_2.png) | ![preview 3](103/preview_3.png) | ![preview 4](103/preview_4.png) | ![preview 5](103/preview_5.png) | ![preview 6](103/preview_6.png) | ![preview 7](103/preview_7.png) | ![preview 8](103/preview_8.png) | | 104 | 65 | [Download](104/dataset.zip) | ![preview 1](104/preview_1.png) | ![preview 2](104/preview_2.png) | ![preview 3](104/preview_3.png) | ![preview 4](104/preview_4.png) | ![preview 5](104/preview_5.png) | ![preview 6](104/preview_6.png) | ![preview 7](104/preview_7.png) | ![preview 8](104/preview_8.png) | | 105 | 22 | [Download](105/dataset.zip) | ![preview 1](105/preview_1.png) | ![preview 2](105/preview_2.png) | ![preview 3](105/preview_3.png) | ![preview 4](105/preview_4.png) | ![preview 5](105/preview_5.png) | ![preview 6](105/preview_6.png) | ![preview 7](105/preview_7.png) | ![preview 8](105/preview_8.png) | | 106 | 21 | [Download](106/dataset.zip) | ![preview 1](106/preview_1.png) | ![preview 2](106/preview_2.png) | ![preview 3](106/preview_3.png) | ![preview 4](106/preview_4.png) | ![preview 5](106/preview_5.png) | ![preview 6](106/preview_6.png) | ![preview 7](106/preview_7.png) | ![preview 8](106/preview_8.png) | | 107 | 299 | [Download](107/dataset.zip) | ![preview 1](107/preview_1.png) | ![preview 2](107/preview_2.png) | ![preview 
3](107/preview_3.png) | ![preview 4](107/preview_4.png) | ![preview 5](107/preview_5.png) | ![preview 6](107/preview_6.png) | ![preview 7](107/preview_7.png) | ![preview 8](107/preview_8.png) | | 108 | 19 | [Download](108/dataset.zip) | ![preview 1](108/preview_1.png) | ![preview 2](108/preview_2.png) | ![preview 3](108/preview_3.png) | ![preview 4](108/preview_4.png) | ![preview 5](108/preview_5.png) | ![preview 6](108/preview_6.png) | ![preview 7](108/preview_7.png) | ![preview 8](108/preview_8.png) | | 109 | 19 | [Download](109/dataset.zip) | ![preview 1](109/preview_1.png) | ![preview 2](109/preview_2.png) | ![preview 3](109/preview_3.png) | ![preview 4](109/preview_4.png) | ![preview 5](109/preview_5.png) | ![preview 6](109/preview_6.png) | ![preview 7](109/preview_7.png) | ![preview 8](109/preview_8.png) | | 110 | 27 | [Download](110/dataset.zip) | ![preview 1](110/preview_1.png) | ![preview 2](110/preview_2.png) | ![preview 3](110/preview_3.png) | ![preview 4](110/preview_4.png) | ![preview 5](110/preview_5.png) | ![preview 6](110/preview_6.png) | ![preview 7](110/preview_7.png) | ![preview 8](110/preview_8.png) | | 111 | 24 | [Download](111/dataset.zip) | ![preview 1](111/preview_1.png) | ![preview 2](111/preview_2.png) | ![preview 3](111/preview_3.png) | ![preview 4](111/preview_4.png) | ![preview 5](111/preview_5.png) | ![preview 6](111/preview_6.png) | ![preview 7](111/preview_7.png) | ![preview 8](111/preview_8.png) | | 112 | 50 | [Download](112/dataset.zip) | ![preview 1](112/preview_1.png) | ![preview 2](112/preview_2.png) | ![preview 3](112/preview_3.png) | ![preview 4](112/preview_4.png) | ![preview 5](112/preview_5.png) | ![preview 6](112/preview_6.png) | ![preview 7](112/preview_7.png) | ![preview 8](112/preview_8.png) | | 113 | 61 | [Download](113/dataset.zip) | ![preview 1](113/preview_1.png) | ![preview 2](113/preview_2.png) | ![preview 3](113/preview_3.png) | ![preview 4](113/preview_4.png) | ![preview 5](113/preview_5.png) | ![preview 
6](113/preview_6.png) | ![preview 7](113/preview_7.png) | ![preview 8](113/preview_8.png) | | 114 | 39 | [Download](114/dataset.zip) | ![preview 1](114/preview_1.png) | ![preview 2](114/preview_2.png) | ![preview 3](114/preview_3.png) | ![preview 4](114/preview_4.png) | ![preview 5](114/preview_5.png) | ![preview 6](114/preview_6.png) | ![preview 7](114/preview_7.png) | ![preview 8](114/preview_8.png) | | 115 | 53 | [Download](115/dataset.zip) | ![preview 1](115/preview_1.png) | ![preview 2](115/preview_2.png) | ![preview 3](115/preview_3.png) | ![preview 4](115/preview_4.png) | ![preview 5](115/preview_5.png) | ![preview 6](115/preview_6.png) | ![preview 7](115/preview_7.png) | ![preview 8](115/preview_8.png) | | 116 | 49 | [Download](116/dataset.zip) | ![preview 1](116/preview_1.png) | ![preview 2](116/preview_2.png) | ![preview 3](116/preview_3.png) | ![preview 4](116/preview_4.png) | ![preview 5](116/preview_5.png) | ![preview 6](116/preview_6.png) | ![preview 7](116/preview_7.png) | ![preview 8](116/preview_8.png) | | 117 | 36 | [Download](117/dataset.zip) | ![preview 1](117/preview_1.png) | ![preview 2](117/preview_2.png) | ![preview 3](117/preview_3.png) | ![preview 4](117/preview_4.png) | ![preview 5](117/preview_5.png) | ![preview 6](117/preview_6.png) | ![preview 7](117/preview_7.png) | ![preview 8](117/preview_8.png) | | 118 | 20 | [Download](118/dataset.zip) | ![preview 1](118/preview_1.png) | ![preview 2](118/preview_2.png) | ![preview 3](118/preview_3.png) | ![preview 4](118/preview_4.png) | ![preview 5](118/preview_5.png) | ![preview 6](118/preview_6.png) | ![preview 7](118/preview_7.png) | ![preview 8](118/preview_8.png) | | 119 | 70 | [Download](119/dataset.zip) | ![preview 1](119/preview_1.png) | ![preview 2](119/preview_2.png) | ![preview 3](119/preview_3.png) | ![preview 4](119/preview_4.png) | ![preview 5](119/preview_5.png) | ![preview 6](119/preview_6.png) | ![preview 7](119/preview_7.png) | ![preview 8](119/preview_8.png) | | 120 | 23 | 
[Download](120/dataset.zip) | ![preview 1](120/preview_1.png) | ![preview 2](120/preview_2.png) | ![preview 3](120/preview_3.png) | ![preview 4](120/preview_4.png) | ![preview 5](120/preview_5.png) | ![preview 6](120/preview_6.png) | ![preview 7](120/preview_7.png) | ![preview 8](120/preview_8.png) | | 121 | 2212 | [Download](121/dataset.zip) | ![preview 1](121/preview_1.png) | ![preview 2](121/preview_2.png) | ![preview 3](121/preview_3.png) | ![preview 4](121/preview_4.png) | ![preview 5](121/preview_5.png) | ![preview 6](121/preview_6.png) | ![preview 7](121/preview_7.png) | ![preview 8](121/preview_8.png) | | 122 | 87 | [Download](122/dataset.zip) | ![preview 1](122/preview_1.png) | ![preview 2](122/preview_2.png) | ![preview 3](122/preview_3.png) | ![preview 4](122/preview_4.png) | ![preview 5](122/preview_5.png) | ![preview 6](122/preview_6.png) | ![preview 7](122/preview_7.png) | ![preview 8](122/preview_8.png) | | 123 | 17 | [Download](123/dataset.zip) | ![preview 1](123/preview_1.png) | ![preview 2](123/preview_2.png) | ![preview 3](123/preview_3.png) | ![preview 4](123/preview_4.png) | ![preview 5](123/preview_5.png) | ![preview 6](123/preview_6.png) | ![preview 7](123/preview_7.png) | ![preview 8](123/preview_8.png) | | 124 | 134 | [Download](124/dataset.zip) | ![preview 1](124/preview_1.png) | ![preview 2](124/preview_2.png) | ![preview 3](124/preview_3.png) | ![preview 4](124/preview_4.png) | ![preview 5](124/preview_5.png) | ![preview 6](124/preview_6.png) | ![preview 7](124/preview_7.png) | ![preview 8](124/preview_8.png) | | 125 | 64 | [Download](125/dataset.zip) | ![preview 1](125/preview_1.png) | ![preview 2](125/preview_2.png) | ![preview 3](125/preview_3.png) | ![preview 4](125/preview_4.png) | ![preview 5](125/preview_5.png) | ![preview 6](125/preview_6.png) | ![preview 7](125/preview_7.png) | ![preview 8](125/preview_8.png) | | 126 | 55 | [Download](126/dataset.zip) | ![preview 1](126/preview_1.png) | ![preview 2](126/preview_2.png) | 
![preview 3](126/preview_3.png) | ![preview 4](126/preview_4.png) | ![preview 5](126/preview_5.png) | ![preview 6](126/preview_6.png) | ![preview 7](126/preview_7.png) | ![preview 8](126/preview_8.png) | | 127 | 28 | [Download](127/dataset.zip) | ![preview 1](127/preview_1.png) | ![preview 2](127/preview_2.png) | ![preview 3](127/preview_3.png) | ![preview 4](127/preview_4.png) | ![preview 5](127/preview_5.png) | ![preview 6](127/preview_6.png) | ![preview 7](127/preview_7.png) | ![preview 8](127/preview_8.png) | | 128 | 31 | [Download](128/dataset.zip) | ![preview 1](128/preview_1.png) | ![preview 2](128/preview_2.png) | ![preview 3](128/preview_3.png) | ![preview 4](128/preview_4.png) | ![preview 5](128/preview_5.png) | ![preview 6](128/preview_6.png) | ![preview 7](128/preview_7.png) | ![preview 8](128/preview_8.png) | | 129 | 95 | [Download](129/dataset.zip) | ![preview 1](129/preview_1.png) | ![preview 2](129/preview_2.png) | ![preview 3](129/preview_3.png) | ![preview 4](129/preview_4.png) | ![preview 5](129/preview_5.png) | ![preview 6](129/preview_6.png) | ![preview 7](129/preview_7.png) | ![preview 8](129/preview_8.png) | | 130 | 22 | [Download](130/dataset.zip) | ![preview 1](130/preview_1.png) | ![preview 2](130/preview_2.png) | ![preview 3](130/preview_3.png) | ![preview 4](130/preview_4.png) | ![preview 5](130/preview_5.png) | ![preview 6](130/preview_6.png) | ![preview 7](130/preview_7.png) | ![preview 8](130/preview_8.png) | | 131 | 19 | [Download](131/dataset.zip) | ![preview 1](131/preview_1.png) | ![preview 2](131/preview_2.png) | ![preview 3](131/preview_3.png) | ![preview 4](131/preview_4.png) | ![preview 5](131/preview_5.png) | ![preview 6](131/preview_6.png) | ![preview 7](131/preview_7.png) | ![preview 8](131/preview_8.png) | | 132 | 28 | [Download](132/dataset.zip) | ![preview 1](132/preview_1.png) | ![preview 2](132/preview_2.png) | ![preview 3](132/preview_3.png) | ![preview 4](132/preview_4.png) | ![preview 5](132/preview_5.png) | 
![preview 6](132/preview_6.png) | ![preview 7](132/preview_7.png) | ![preview 8](132/preview_8.png) | | 133 | 145 | [Download](133/dataset.zip) | ![preview 1](133/preview_1.png) | ![preview 2](133/preview_2.png) | ![preview 3](133/preview_3.png) | ![preview 4](133/preview_4.png) | ![preview 5](133/preview_5.png) | ![preview 6](133/preview_6.png) | ![preview 7](133/preview_7.png) | ![preview 8](133/preview_8.png) | | 134 | 256 | [Download](134/dataset.zip) | ![preview 1](134/preview_1.png) | ![preview 2](134/preview_2.png) | ![preview 3](134/preview_3.png) | ![preview 4](134/preview_4.png) | ![preview 5](134/preview_5.png) | ![preview 6](134/preview_6.png) | ![preview 7](134/preview_7.png) | ![preview 8](134/preview_8.png) | | 135 | 20 | [Download](135/dataset.zip) | ![preview 1](135/preview_1.png) | ![preview 2](135/preview_2.png) | ![preview 3](135/preview_3.png) | ![preview 4](135/preview_4.png) | ![preview 5](135/preview_5.png) | ![preview 6](135/preview_6.png) | ![preview 7](135/preview_7.png) | ![preview 8](135/preview_8.png) | | 136 | 54 | [Download](136/dataset.zip) | ![preview 1](136/preview_1.png) | ![preview 2](136/preview_2.png) | ![preview 3](136/preview_3.png) | ![preview 4](136/preview_4.png) | ![preview 5](136/preview_5.png) | ![preview 6](136/preview_6.png) | ![preview 7](136/preview_7.png) | ![preview 8](136/preview_8.png) | | 137 | 82 | [Download](137/dataset.zip) | ![preview 1](137/preview_1.png) | ![preview 2](137/preview_2.png) | ![preview 3](137/preview_3.png) | ![preview 4](137/preview_4.png) | ![preview 5](137/preview_5.png) | ![preview 6](137/preview_6.png) | ![preview 7](137/preview_7.png) | ![preview 8](137/preview_8.png) | | 138 | 12 | [Download](138/dataset.zip) | ![preview 1](138/preview_1.png) | ![preview 2](138/preview_2.png) | ![preview 3](138/preview_3.png) | ![preview 4](138/preview_4.png) | ![preview 5](138/preview_5.png) | ![preview 6](138/preview_6.png) | ![preview 7](138/preview_7.png) | ![preview 8](138/preview_8.png) | | 139 
| 25 | [Download](139/dataset.zip) | ![preview 1](139/preview_1.png) | ![preview 2](139/preview_2.png) | ![preview 3](139/preview_3.png) | ![preview 4](139/preview_4.png) | ![preview 5](139/preview_5.png) | ![preview 6](139/preview_6.png) | ![preview 7](139/preview_7.png) | ![preview 8](139/preview_8.png) | | 140 | 42 | [Download](140/dataset.zip) | ![preview 1](140/preview_1.png) | ![preview 2](140/preview_2.png) | ![preview 3](140/preview_3.png) | ![preview 4](140/preview_4.png) | ![preview 5](140/preview_5.png) | ![preview 6](140/preview_6.png) | ![preview 7](140/preview_7.png) | ![preview 8](140/preview_8.png) | | 141 | 26 | [Download](141/dataset.zip) | ![preview 1](141/preview_1.png) | ![preview 2](141/preview_2.png) | ![preview 3](141/preview_3.png) | ![preview 4](141/preview_4.png) | ![preview 5](141/preview_5.png) | ![preview 6](141/preview_6.png) | ![preview 7](141/preview_7.png) | ![preview 8](141/preview_8.png) | | 142 | 27 | [Download](142/dataset.zip) | ![preview 1](142/preview_1.png) | ![preview 2](142/preview_2.png) | ![preview 3](142/preview_3.png) | ![preview 4](142/preview_4.png) | ![preview 5](142/preview_5.png) | ![preview 6](142/preview_6.png) | ![preview 7](142/preview_7.png) | ![preview 8](142/preview_8.png) | | 143 | 22 | [Download](143/dataset.zip) | ![preview 1](143/preview_1.png) | ![preview 2](143/preview_2.png) | ![preview 3](143/preview_3.png) | ![preview 4](143/preview_4.png) | ![preview 5](143/preview_5.png) | ![preview 6](143/preview_6.png) | ![preview 7](143/preview_7.png) | ![preview 8](143/preview_8.png) | | 144 | 59 | [Download](144/dataset.zip) | ![preview 1](144/preview_1.png) | ![preview 2](144/preview_2.png) | ![preview 3](144/preview_3.png) | ![preview 4](144/preview_4.png) | ![preview 5](144/preview_5.png) | ![preview 6](144/preview_6.png) | ![preview 7](144/preview_7.png) | ![preview 8](144/preview_8.png) | | 145 | 37 | [Download](145/dataset.zip) | ![preview 1](145/preview_1.png) | ![preview 2](145/preview_2.png) | 
![preview 3](145/preview_3.png) | ![preview 4](145/preview_4.png) | ![preview 5](145/preview_5.png) | ![preview 6](145/preview_6.png) | ![preview 7](145/preview_7.png) | ![preview 8](145/preview_8.png) | | 146 | 47 | [Download](146/dataset.zip) | ![preview 1](146/preview_1.png) | ![preview 2](146/preview_2.png) | ![preview 3](146/preview_3.png) | ![preview 4](146/preview_4.png) | ![preview 5](146/preview_5.png) | ![preview 6](146/preview_6.png) | ![preview 7](146/preview_7.png) | ![preview 8](146/preview_8.png) | | 147 | 64 | [Download](147/dataset.zip) | ![preview 1](147/preview_1.png) | ![preview 2](147/preview_2.png) | ![preview 3](147/preview_3.png) | ![preview 4](147/preview_4.png) | ![preview 5](147/preview_5.png) | ![preview 6](147/preview_6.png) | ![preview 7](147/preview_7.png) | ![preview 8](147/preview_8.png) | | 148 | 22 | [Download](148/dataset.zip) | ![preview 1](148/preview_1.png) | ![preview 2](148/preview_2.png) | ![preview 3](148/preview_3.png) | ![preview 4](148/preview_4.png) | ![preview 5](148/preview_5.png) | ![preview 6](148/preview_6.png) | ![preview 7](148/preview_7.png) | ![preview 8](148/preview_8.png) | | 149 | 12 | [Download](149/dataset.zip) | ![preview 1](149/preview_1.png) | ![preview 2](149/preview_2.png) | ![preview 3](149/preview_3.png) | ![preview 4](149/preview_4.png) | ![preview 5](149/preview_5.png) | ![preview 6](149/preview_6.png) | ![preview 7](149/preview_7.png) | ![preview 8](149/preview_8.png) | | 150 | 14 | [Download](150/dataset.zip) | ![preview 1](150/preview_1.png) | ![preview 2](150/preview_2.png) | ![preview 3](150/preview_3.png) | ![preview 4](150/preview_4.png) | ![preview 5](150/preview_5.png) | ![preview 6](150/preview_6.png) | ![preview 7](150/preview_7.png) | ![preview 8](150/preview_8.png) | | 151 | 37 | [Download](151/dataset.zip) | ![preview 1](151/preview_1.png) | ![preview 2](151/preview_2.png) | ![preview 3](151/preview_3.png) | ![preview 4](151/preview_4.png) | ![preview 5](151/preview_5.png) | 
![preview 6](151/preview_6.png) | ![preview 7](151/preview_7.png) | ![preview 8](151/preview_8.png) | | 152 | 11 | [Download](152/dataset.zip) | ![preview 1](152/preview_1.png) | ![preview 2](152/preview_2.png) | ![preview 3](152/preview_3.png) | ![preview 4](152/preview_4.png) | ![preview 5](152/preview_5.png) | ![preview 6](152/preview_6.png) | ![preview 7](152/preview_7.png) | ![preview 8](152/preview_8.png) | | 153 | 37 | [Download](153/dataset.zip) | ![preview 1](153/preview_1.png) | ![preview 2](153/preview_2.png) | ![preview 3](153/preview_3.png) | ![preview 4](153/preview_4.png) | ![preview 5](153/preview_5.png) | ![preview 6](153/preview_6.png) | ![preview 7](153/preview_7.png) | ![preview 8](153/preview_8.png) | | 154 | 29 | [Download](154/dataset.zip) | ![preview 1](154/preview_1.png) | ![preview 2](154/preview_2.png) | ![preview 3](154/preview_3.png) | ![preview 4](154/preview_4.png) | ![preview 5](154/preview_5.png) | ![preview 6](154/preview_6.png) | ![preview 7](154/preview_7.png) | ![preview 8](154/preview_8.png) | | 155 | 32 | [Download](155/dataset.zip) | ![preview 1](155/preview_1.png) | ![preview 2](155/preview_2.png) | ![preview 3](155/preview_3.png) | ![preview 4](155/preview_4.png) | ![preview 5](155/preview_5.png) | ![preview 6](155/preview_6.png) | ![preview 7](155/preview_7.png) | ![preview 8](155/preview_8.png) | | 156 | 19 | [Download](156/dataset.zip) | ![preview 1](156/preview_1.png) | ![preview 2](156/preview_2.png) | ![preview 3](156/preview_3.png) | ![preview 4](156/preview_4.png) | ![preview 5](156/preview_5.png) | ![preview 6](156/preview_6.png) | ![preview 7](156/preview_7.png) | ![preview 8](156/preview_8.png) | | 157 | 35 | [Download](157/dataset.zip) | ![preview 1](157/preview_1.png) | ![preview 2](157/preview_2.png) | ![preview 3](157/preview_3.png) | ![preview 4](157/preview_4.png) | ![preview 5](157/preview_5.png) | ![preview 6](157/preview_6.png) | ![preview 7](157/preview_7.png) | ![preview 8](157/preview_8.png) | | 158 | 
51 | [Download](158/dataset.zip) | ![preview 1](158/preview_1.png) | ![preview 2](158/preview_2.png) | ![preview 3](158/preview_3.png) | ![preview 4](158/preview_4.png) | ![preview 5](158/preview_5.png) | ![preview 6](158/preview_6.png) | ![preview 7](158/preview_7.png) | ![preview 8](158/preview_8.png) | | 159 | 21 | [Download](159/dataset.zip) | ![preview 1](159/preview_1.png) | ![preview 2](159/preview_2.png) | ![preview 3](159/preview_3.png) | ![preview 4](159/preview_4.png) | ![preview 5](159/preview_5.png) | ![preview 6](159/preview_6.png) | ![preview 7](159/preview_7.png) | ![preview 8](159/preview_8.png) | | 160 | 23 | [Download](160/dataset.zip) | ![preview 1](160/preview_1.png) | ![preview 2](160/preview_2.png) | ![preview 3](160/preview_3.png) | ![preview 4](160/preview_4.png) | ![preview 5](160/preview_5.png) | ![preview 6](160/preview_6.png) | ![preview 7](160/preview_7.png) | ![preview 8](160/preview_8.png) | | 161 | 30 | [Download](161/dataset.zip) | ![preview 1](161/preview_1.png) | ![preview 2](161/preview_2.png) | ![preview 3](161/preview_3.png) | ![preview 4](161/preview_4.png) | ![preview 5](161/preview_5.png) | ![preview 6](161/preview_6.png) | ![preview 7](161/preview_7.png) | ![preview 8](161/preview_8.png) | | 162 | 18 | [Download](162/dataset.zip) | ![preview 1](162/preview_1.png) | ![preview 2](162/preview_2.png) | ![preview 3](162/preview_3.png) | ![preview 4](162/preview_4.png) | ![preview 5](162/preview_5.png) | ![preview 6](162/preview_6.png) | ![preview 7](162/preview_7.png) | ![preview 8](162/preview_8.png) | | 163 | 26 | [Download](163/dataset.zip) | ![preview 1](163/preview_1.png) | ![preview 2](163/preview_2.png) | ![preview 3](163/preview_3.png) | ![preview 4](163/preview_4.png) | ![preview 5](163/preview_5.png) | ![preview 6](163/preview_6.png) | ![preview 7](163/preview_7.png) | ![preview 8](163/preview_8.png) | | 164 | 24 | [Download](164/dataset.zip) | ![preview 1](164/preview_1.png) | ![preview 2](164/preview_2.png) | 
![preview 3](164/preview_3.png) | ![preview 4](164/preview_4.png) | ![preview 5](164/preview_5.png) | ![preview 6](164/preview_6.png) | ![preview 7](164/preview_7.png) | ![preview 8](164/preview_8.png) | | 165 | 25 | [Download](165/dataset.zip) | ![preview 1](165/preview_1.png) | ![preview 2](165/preview_2.png) | ![preview 3](165/preview_3.png) | ![preview 4](165/preview_4.png) | ![preview 5](165/preview_5.png) | ![preview 6](165/preview_6.png) | ![preview 7](165/preview_7.png) | ![preview 8](165/preview_8.png) | | 166 | 16 | [Download](166/dataset.zip) | ![preview 1](166/preview_1.png) | ![preview 2](166/preview_2.png) | ![preview 3](166/preview_3.png) | ![preview 4](166/preview_4.png) | ![preview 5](166/preview_5.png) | ![preview 6](166/preview_6.png) | ![preview 7](166/preview_7.png) | ![preview 8](166/preview_8.png) | | 167 | 39 | [Download](167/dataset.zip) | ![preview 1](167/preview_1.png) | ![preview 2](167/preview_2.png) | ![preview 3](167/preview_3.png) | ![preview 4](167/preview_4.png) | ![preview 5](167/preview_5.png) | ![preview 6](167/preview_6.png) | ![preview 7](167/preview_7.png) | ![preview 8](167/preview_8.png) | | 168 | 44 | [Download](168/dataset.zip) | ![preview 1](168/preview_1.png) | ![preview 2](168/preview_2.png) | ![preview 3](168/preview_3.png) | ![preview 4](168/preview_4.png) | ![preview 5](168/preview_5.png) | ![preview 6](168/preview_6.png) | ![preview 7](168/preview_7.png) | ![preview 8](168/preview_8.png) | | 169 | 20 | [Download](169/dataset.zip) | ![preview 1](169/preview_1.png) | ![preview 2](169/preview_2.png) | ![preview 3](169/preview_3.png) | ![preview 4](169/preview_4.png) | ![preview 5](169/preview_5.png) | ![preview 6](169/preview_6.png) | ![preview 7](169/preview_7.png) | ![preview 8](169/preview_8.png) | | 170 | 26 | [Download](170/dataset.zip) | ![preview 1](170/preview_1.png) | ![preview 2](170/preview_2.png) | ![preview 3](170/preview_3.png) | ![preview 4](170/preview_4.png) | ![preview 5](170/preview_5.png) | 
![preview 6](170/preview_6.png) | ![preview 7](170/preview_7.png) | ![preview 8](170/preview_8.png) | | 171 | 29 | [Download](171/dataset.zip) | ![preview 1](171/preview_1.png) | ![preview 2](171/preview_2.png) | ![preview 3](171/preview_3.png) | ![preview 4](171/preview_4.png) | ![preview 5](171/preview_5.png) | ![preview 6](171/preview_6.png) | ![preview 7](171/preview_7.png) | ![preview 8](171/preview_8.png) | | 172 | 29 | [Download](172/dataset.zip) | ![preview 1](172/preview_1.png) | ![preview 2](172/preview_2.png) | ![preview 3](172/preview_3.png) | ![preview 4](172/preview_4.png) | ![preview 5](172/preview_5.png) | ![preview 6](172/preview_6.png) | ![preview 7](172/preview_7.png) | ![preview 8](172/preview_8.png) | | 173 | 12 | [Download](173/dataset.zip) | ![preview 1](173/preview_1.png) | ![preview 2](173/preview_2.png) | ![preview 3](173/preview_3.png) | ![preview 4](173/preview_4.png) | ![preview 5](173/preview_5.png) | ![preview 6](173/preview_6.png) | ![preview 7](173/preview_7.png) | ![preview 8](173/preview_8.png) | | 174 | 24 | [Download](174/dataset.zip) | ![preview 1](174/preview_1.png) | ![preview 2](174/preview_2.png) | ![preview 3](174/preview_3.png) | ![preview 4](174/preview_4.png) | ![preview 5](174/preview_5.png) | ![preview 6](174/preview_6.png) | ![preview 7](174/preview_7.png) | ![preview 8](174/preview_8.png) | | 175 | 47 | [Download](175/dataset.zip) | ![preview 1](175/preview_1.png) | ![preview 2](175/preview_2.png) | ![preview 3](175/preview_3.png) | ![preview 4](175/preview_4.png) | ![preview 5](175/preview_5.png) | ![preview 6](175/preview_6.png) | ![preview 7](175/preview_7.png) | ![preview 8](175/preview_8.png) | | 176 | 16 | [Download](176/dataset.zip) | ![preview 1](176/preview_1.png) | ![preview 2](176/preview_2.png) | ![preview 3](176/preview_3.png) | ![preview 4](176/preview_4.png) | ![preview 5](176/preview_5.png) | ![preview 6](176/preview_6.png) | ![preview 7](176/preview_7.png) | ![preview 8](176/preview_8.png) | | 177 | 
22 | [Download](177/dataset.zip) | ![preview 1](177/preview_1.png) | ![preview 2](177/preview_2.png) | ![preview 3](177/preview_3.png) | ![preview 4](177/preview_4.png) | ![preview 5](177/preview_5.png) | ![preview 6](177/preview_6.png) | ![preview 7](177/preview_7.png) | ![preview 8](177/preview_8.png) | | 178 | 291 | [Download](178/dataset.zip) | ![preview 1](178/preview_1.png) | ![preview 2](178/preview_2.png) | ![preview 3](178/preview_3.png) | ![preview 4](178/preview_4.png) | ![preview 5](178/preview_5.png) | ![preview 6](178/preview_6.png) | ![preview 7](178/preview_7.png) | ![preview 8](178/preview_8.png) | | 179 | 21 | [Download](179/dataset.zip) | ![preview 1](179/preview_1.png) | ![preview 2](179/preview_2.png) | ![preview 3](179/preview_3.png) | ![preview 4](179/preview_4.png) | ![preview 5](179/preview_5.png) | ![preview 6](179/preview_6.png) | ![preview 7](179/preview_7.png) | ![preview 8](179/preview_8.png) | | 180 | 148 | [Download](180/dataset.zip) | ![preview 1](180/preview_1.png) | ![preview 2](180/preview_2.png) | ![preview 3](180/preview_3.png) | ![preview 4](180/preview_4.png) | ![preview 5](180/preview_5.png) | ![preview 6](180/preview_6.png) | ![preview 7](180/preview_7.png) | ![preview 8](180/preview_8.png) | | 181 | 32 | [Download](181/dataset.zip) | ![preview 1](181/preview_1.png) | ![preview 2](181/preview_2.png) | ![preview 3](181/preview_3.png) | ![preview 4](181/preview_4.png) | ![preview 5](181/preview_5.png) | ![preview 6](181/preview_6.png) | ![preview 7](181/preview_7.png) | ![preview 8](181/preview_8.png) | | 182 | 31 | [Download](182/dataset.zip) | ![preview 1](182/preview_1.png) | ![preview 2](182/preview_2.png) | ![preview 3](182/preview_3.png) | ![preview 4](182/preview_4.png) | ![preview 5](182/preview_5.png) | ![preview 6](182/preview_6.png) | ![preview 7](182/preview_7.png) | ![preview 8](182/preview_8.png) | | 183 | 23 | [Download](183/dataset.zip) | ![preview 1](183/preview_1.png) | ![preview 2](183/preview_2.png) | 
![preview 3](183/preview_3.png) | ![preview 4](183/preview_4.png) | ![preview 5](183/preview_5.png) | ![preview 6](183/preview_6.png) | ![preview 7](183/preview_7.png) | ![preview 8](183/preview_8.png) | | 184 | 22 | [Download](184/dataset.zip) | ![preview 1](184/preview_1.png) | ![preview 2](184/preview_2.png) | ![preview 3](184/preview_3.png) | ![preview 4](184/preview_4.png) | ![preview 5](184/preview_5.png) | ![preview 6](184/preview_6.png) | ![preview 7](184/preview_7.png) | ![preview 8](184/preview_8.png) | | 185 | 16 | [Download](185/dataset.zip) | ![preview 1](185/preview_1.png) | ![preview 2](185/preview_2.png) | ![preview 3](185/preview_3.png) | ![preview 4](185/preview_4.png) | ![preview 5](185/preview_5.png) | ![preview 6](185/preview_6.png) | ![preview 7](185/preview_7.png) | ![preview 8](185/preview_8.png) | | 186 | 28 | [Download](186/dataset.zip) | ![preview 1](186/preview_1.png) | ![preview 2](186/preview_2.png) | ![preview 3](186/preview_3.png) | ![preview 4](186/preview_4.png) | ![preview 5](186/preview_5.png) | ![preview 6](186/preview_6.png) | ![preview 7](186/preview_7.png) | ![preview 8](186/preview_8.png) | | 187 | 26 | [Download](187/dataset.zip) | ![preview 1](187/preview_1.png) | ![preview 2](187/preview_2.png) | ![preview 3](187/preview_3.png) | ![preview 4](187/preview_4.png) | ![preview 5](187/preview_5.png) | ![preview 6](187/preview_6.png) | ![preview 7](187/preview_7.png) | ![preview 8](187/preview_8.png) | | 188 | 21 | [Download](188/dataset.zip) | ![preview 1](188/preview_1.png) | ![preview 2](188/preview_2.png) | ![preview 3](188/preview_3.png) | ![preview 4](188/preview_4.png) | ![preview 5](188/preview_5.png) | ![preview 6](188/preview_6.png) | ![preview 7](188/preview_7.png) | ![preview 8](188/preview_8.png) | | 189 | 45 | [Download](189/dataset.zip) | ![preview 1](189/preview_1.png) | ![preview 2](189/preview_2.png) | ![preview 3](189/preview_3.png) | ![preview 4](189/preview_4.png) | ![preview 5](189/preview_5.png) | 
![preview 6](189/preview_6.png) | ![preview 7](189/preview_7.png) | ![preview 8](189/preview_8.png) | | 190 | 20 | [Download](190/dataset.zip) | ![preview 1](190/preview_1.png) | ![preview 2](190/preview_2.png) | ![preview 3](190/preview_3.png) | ![preview 4](190/preview_4.png) | ![preview 5](190/preview_5.png) | ![preview 6](190/preview_6.png) | ![preview 7](190/preview_7.png) | ![preview 8](190/preview_8.png) | | 191 | 24 | [Download](191/dataset.zip) | ![preview 1](191/preview_1.png) | ![preview 2](191/preview_2.png) | ![preview 3](191/preview_3.png) | ![preview 4](191/preview_4.png) | ![preview 5](191/preview_5.png) | ![preview 6](191/preview_6.png) | ![preview 7](191/preview_7.png) | ![preview 8](191/preview_8.png) | | 192 | 20 | [Download](192/dataset.zip) | ![preview 1](192/preview_1.png) | ![preview 2](192/preview_2.png) | ![preview 3](192/preview_3.png) | ![preview 4](192/preview_4.png) | ![preview 5](192/preview_5.png) | ![preview 6](192/preview_6.png) | ![preview 7](192/preview_7.png) | ![preview 8](192/preview_8.png) | | 193 | 23 | [Download](193/dataset.zip) | ![preview 1](193/preview_1.png) | ![preview 2](193/preview_2.png) | ![preview 3](193/preview_3.png) | ![preview 4](193/preview_4.png) | ![preview 5](193/preview_5.png) | ![preview 6](193/preview_6.png) | ![preview 7](193/preview_7.png) | ![preview 8](193/preview_8.png) | | 194 | 33 | [Download](194/dataset.zip) | ![preview 1](194/preview_1.png) | ![preview 2](194/preview_2.png) | ![preview 3](194/preview_3.png) | ![preview 4](194/preview_4.png) | ![preview 5](194/preview_5.png) | ![preview 6](194/preview_6.png) | ![preview 7](194/preview_7.png) | ![preview 8](194/preview_8.png) | | 195 | 12 | [Download](195/dataset.zip) | ![preview 1](195/preview_1.png) | ![preview 2](195/preview_2.png) | ![preview 3](195/preview_3.png) | ![preview 4](195/preview_4.png) | ![preview 5](195/preview_5.png) | ![preview 6](195/preview_6.png) | ![preview 7](195/preview_7.png) | ![preview 8](195/preview_8.png) | | 196 | 
15 | [Download](196/dataset.zip) | ![preview 1](196/preview_1.png) | ![preview 2](196/preview_2.png) | ![preview 3](196/preview_3.png) | ![preview 4](196/preview_4.png) | ![preview 5](196/preview_5.png) | ![preview 6](196/preview_6.png) | ![preview 7](196/preview_7.png) | ![preview 8](196/preview_8.png) | | 197 | 9 | [Download](197/dataset.zip) | ![preview 1](197/preview_1.png) | ![preview 2](197/preview_2.png) | ![preview 3](197/preview_3.png) | ![preview 4](197/preview_4.png) | ![preview 5](197/preview_5.png) | ![preview 6](197/preview_6.png) | ![preview 7](197/preview_7.png) | ![preview 8](197/preview_8.png) | | 198 | 10 | [Download](198/dataset.zip) | ![preview 1](198/preview_1.png) | ![preview 2](198/preview_2.png) | ![preview 3](198/preview_3.png) | ![preview 4](198/preview_4.png) | ![preview 5](198/preview_5.png) | ![preview 6](198/preview_6.png) | ![preview 7](198/preview_7.png) | ![preview 8](198/preview_8.png) | | 199 | 24 | [Download](199/dataset.zip) | ![preview 1](199/preview_1.png) | ![preview 2](199/preview_2.png) | ![preview 3](199/preview_3.png) | ![preview 4](199/preview_4.png) | ![preview 5](199/preview_5.png) | ![preview 6](199/preview_6.png) | ![preview 7](199/preview_7.png) | ![preview 8](199/preview_8.png) | | 200 | 24 | [Download](200/dataset.zip) | ![preview 1](200/preview_1.png) | ![preview 2](200/preview_2.png) | ![preview 3](200/preview_3.png) | ![preview 4](200/preview_4.png) | ![preview 5](200/preview_5.png) | ![preview 6](200/preview_6.png) | ![preview 7](200/preview_7.png) | ![preview 8](200/preview_8.png) | | 201 | 18 | [Download](201/dataset.zip) | ![preview 1](201/preview_1.png) | ![preview 2](201/preview_2.png) | ![preview 3](201/preview_3.png) | ![preview 4](201/preview_4.png) | ![preview 5](201/preview_5.png) | ![preview 6](201/preview_6.png) | ![preview 7](201/preview_7.png) | ![preview 8](201/preview_8.png) | | 202 | 43 | [Download](202/dataset.zip) | ![preview 1](202/preview_1.png) | ![preview 2](202/preview_2.png) | 
![preview 3](202/preview_3.png) | ![preview 4](202/preview_4.png) | ![preview 5](202/preview_5.png) | ![preview 6](202/preview_6.png) | ![preview 7](202/preview_7.png) | ![preview 8](202/preview_8.png) | | 203 | 15 | [Download](203/dataset.zip) | ![preview 1](203/preview_1.png) | ![preview 2](203/preview_2.png) | ![preview 3](203/preview_3.png) | ![preview 4](203/preview_4.png) | ![preview 5](203/preview_5.png) | ![preview 6](203/preview_6.png) | ![preview 7](203/preview_7.png) | ![preview 8](203/preview_8.png) | | 204 | 9 | [Download](204/dataset.zip) | ![preview 1](204/preview_1.png) | ![preview 2](204/preview_2.png) | ![preview 3](204/preview_3.png) | ![preview 4](204/preview_4.png) | ![preview 5](204/preview_5.png) | ![preview 6](204/preview_6.png) | ![preview 7](204/preview_7.png) | ![preview 8](204/preview_8.png) | | 205 | 21 | [Download](205/dataset.zip) | ![preview 1](205/preview_1.png) | ![preview 2](205/preview_2.png) | ![preview 3](205/preview_3.png) | ![preview 4](205/preview_4.png) | ![preview 5](205/preview_5.png) | ![preview 6](205/preview_6.png) | ![preview 7](205/preview_7.png) | ![preview 8](205/preview_8.png) | | 206 | 24 | [Download](206/dataset.zip) | ![preview 1](206/preview_1.png) | ![preview 2](206/preview_2.png) | ![preview 3](206/preview_3.png) | ![preview 4](206/preview_4.png) | ![preview 5](206/preview_5.png) | ![preview 6](206/preview_6.png) | ![preview 7](206/preview_7.png) | ![preview 8](206/preview_8.png) | | 207 | 17 | [Download](207/dataset.zip) | ![preview 1](207/preview_1.png) | ![preview 2](207/preview_2.png) | ![preview 3](207/preview_3.png) | ![preview 4](207/preview_4.png) | ![preview 5](207/preview_5.png) | ![preview 6](207/preview_6.png) | ![preview 7](207/preview_7.png) | ![preview 8](207/preview_8.png) | | 208 | 6 | [Download](208/dataset.zip) | ![preview 1](208/preview_1.png) | ![preview 2](208/preview_2.png) | ![preview 3](208/preview_3.png) | ![preview 4](208/preview_4.png) | ![preview 5](208/preview_5.png) | ![preview 
6](208/preview_6.png) | N/A | N/A | | 209 | 21 | [Download](209/dataset.zip) | ![preview 1](209/preview_1.png) | ![preview 2](209/preview_2.png) | ![preview 3](209/preview_3.png) | ![preview 4](209/preview_4.png) | ![preview 5](209/preview_5.png) | ![preview 6](209/preview_6.png) | ![preview 7](209/preview_7.png) | ![preview 8](209/preview_8.png) | | 210 | 13 | [Download](210/dataset.zip) | ![preview 1](210/preview_1.png) | ![preview 2](210/preview_2.png) | ![preview 3](210/preview_3.png) | ![preview 4](210/preview_4.png) | ![preview 5](210/preview_5.png) | ![preview 6](210/preview_6.png) | ![preview 7](210/preview_7.png) | ![preview 8](210/preview_8.png) | | 211 | 8 | [Download](211/dataset.zip) | ![preview 1](211/preview_1.png) | ![preview 2](211/preview_2.png) | ![preview 3](211/preview_3.png) | ![preview 4](211/preview_4.png) | ![preview 5](211/preview_5.png) | ![preview 6](211/preview_6.png) | ![preview 7](211/preview_7.png) | ![preview 8](211/preview_8.png) | | 212 | 26 | [Download](212/dataset.zip) | ![preview 1](212/preview_1.png) | ![preview 2](212/preview_2.png) | ![preview 3](212/preview_3.png) | ![preview 4](212/preview_4.png) | ![preview 5](212/preview_5.png) | ![preview 6](212/preview_6.png) | ![preview 7](212/preview_7.png) | ![preview 8](212/preview_8.png) | | 213 | 25 | [Download](213/dataset.zip) | ![preview 1](213/preview_1.png) | ![preview 2](213/preview_2.png) | ![preview 3](213/preview_3.png) | ![preview 4](213/preview_4.png) | ![preview 5](213/preview_5.png) | ![preview 6](213/preview_6.png) | ![preview 7](213/preview_7.png) | ![preview 8](213/preview_8.png) | | 214 | 12 | [Download](214/dataset.zip) | ![preview 1](214/preview_1.png) | ![preview 2](214/preview_2.png) | ![preview 3](214/preview_3.png) | ![preview 4](214/preview_4.png) | ![preview 5](214/preview_5.png) | ![preview 6](214/preview_6.png) | ![preview 7](214/preview_7.png) | ![preview 8](214/preview_8.png) | | 215 | 18 | [Download](215/dataset.zip) | ![preview 1](215/preview_1.png) 
| ![preview 2](215/preview_2.png) | ![preview 3](215/preview_3.png) | ![preview 4](215/preview_4.png) | ![preview 5](215/preview_5.png) | ![preview 6](215/preview_6.png) | ![preview 7](215/preview_7.png) | ![preview 8](215/preview_8.png) | | 216 | 6 | [Download](216/dataset.zip) | ![preview 1](216/preview_1.png) | ![preview 2](216/preview_2.png) | ![preview 3](216/preview_3.png) | ![preview 4](216/preview_4.png) | ![preview 5](216/preview_5.png) | ![preview 6](216/preview_6.png) | N/A | N/A | | 217 | 5 | [Download](217/dataset.zip) | ![preview 1](217/preview_1.png) | ![preview 2](217/preview_2.png) | ![preview 3](217/preview_3.png) | ![preview 4](217/preview_4.png) | ![preview 5](217/preview_5.png) | N/A | N/A | N/A | | 218 | 7 | [Download](218/dataset.zip) | ![preview 1](218/preview_1.png) | ![preview 2](218/preview_2.png) | ![preview 3](218/preview_3.png) | ![preview 4](218/preview_4.png) | ![preview 5](218/preview_5.png) | ![preview 6](218/preview_6.png) | ![preview 7](218/preview_7.png) | N/A | | 219 | 9 | [Download](219/dataset.zip) | ![preview 1](219/preview_1.png) | ![preview 2](219/preview_2.png) | ![preview 3](219/preview_3.png) | ![preview 4](219/preview_4.png) | ![preview 5](219/preview_5.png) | ![preview 6](219/preview_6.png) | ![preview 7](219/preview_7.png) | ![preview 8](219/preview_8.png) | | 220 | 10 | [Download](220/dataset.zip) | ![preview 1](220/preview_1.png) | ![preview 2](220/preview_2.png) | ![preview 3](220/preview_3.png) | ![preview 4](220/preview_4.png) | ![preview 5](220/preview_5.png) | ![preview 6](220/preview_6.png) | ![preview 7](220/preview_7.png) | ![preview 8](220/preview_8.png) | | 221 | 27 | [Download](221/dataset.zip) | ![preview 1](221/preview_1.png) | ![preview 2](221/preview_2.png) | ![preview 3](221/preview_3.png) | ![preview 4](221/preview_4.png) | ![preview 5](221/preview_5.png) | ![preview 6](221/preview_6.png) | ![preview 7](221/preview_7.png) | ![preview 8](221/preview_8.png) | | 222 | 9 | [Download](222/dataset.zip) | 
![preview 1](222/preview_1.png) | ![preview 2](222/preview_2.png) | ![preview 3](222/preview_3.png) | ![preview 4](222/preview_4.png) | ![preview 5](222/preview_5.png) | ![preview 6](222/preview_6.png) | ![preview 7](222/preview_7.png) | ![preview 8](222/preview_8.png) | | 223 | 8 | [Download](223/dataset.zip) | ![preview 1](223/preview_1.png) | ![preview 2](223/preview_2.png) | ![preview 3](223/preview_3.png) | ![preview 4](223/preview_4.png) | ![preview 5](223/preview_5.png) | ![preview 6](223/preview_6.png) | ![preview 7](223/preview_7.png) | ![preview 8](223/preview_8.png) | | noise | 484 | [Download](-1/dataset.zip) | ![preview 1](-1/preview_1.png) | ![preview 2](-1/preview_2.png) | ![preview 3](-1/preview_3.png) | ![preview 4](-1/preview_4.png) | ![preview 5](-1/preview_5.png) | ![preview 6](-1/preview_6.png) | ![preview 7](-1/preview_7.png) | ![preview 8](-1/preview_8.png) |
[ -0.7198167443275452, -0.18609921634197235, 0.13977576792240143, 0.18896222114562988, -0.25711509585380554, -0.07544581592082977, -0.015167162753641605, -0.37679538130760193, 0.6681821942329407, 0.4478287398815155, -0.8903675675392151, -0.8560002446174622, -0.6857640743255615, 0.49271339178...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public
open-llm-leaderboard
2023-11-12T18:18:50Z
0
0
null
[ "region:us" ]
2023-11-12T18:18:50Z
2023-11-12T18:17:49.000Z
2023-11-12T18:17:49
--- pretty_name: Evaluation run of uukuguy/speechless-mistral-six-in-one-7b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [uukuguy/speechless-mistral-six-in-one-7b](https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T18:14:50.698039](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public/blob/main/results_2023-11-12T18-14-50.698039.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6276350372644707,\n\ \ \"acc_stderr\": 0.03243221410411415,\n \"acc_norm\": 0.636467872903276,\n\ \ \"acc_norm_stderr\": 0.03312856166774958,\n \"mc1\": 0.40514075887392903,\n\ \ \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.5776708582574724,\n\ \ \"mc2_stderr\": 0.01544223129155929,\n \"em\": 0.0041946308724832215,\n\ \ \"em_stderr\": 0.0006618716168266571,\n \"f1\": 0.09125943791946291,\n\ \ \"f1_stderr\": 0.0018243790800558358\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5981228668941979,\n \"acc_stderr\": 0.014327268614578276,\n\ \ \"acc_norm\": 0.6296928327645052,\n \"acc_norm_stderr\": 0.01411129875167495\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.652459669388568,\n\ \ \"acc_stderr\": 0.004752158936871872,\n \"acc_norm\": 0.8460466042620992,\n\ \ \"acc_norm_stderr\": 0.00360166483871892\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \ \ \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n\ \ \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n\ \ \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n\ \ \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n\ \ \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n\ \ \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.02872750295788027,\n\ \ \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.02872750295788027\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 
0.7361111111111112,\n\ \ \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n\ \ \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \ \ \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n\ \ \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6184971098265896,\n\ \ \"acc_stderr\": 0.03703851193099521,\n \"acc_norm\": 0.6184971098265896,\n\ \ \"acc_norm_stderr\": 0.03703851193099521\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n\ \ \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467382,\n\ \ \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467382\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\ \ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\ \ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 0.040824829046386284,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.040824829046386284\n \ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n 
\"acc\"\ : 0.4074074074074074,\n \"acc_stderr\": 0.025305906241590626,\n \"\ acc_norm\": 0.4074074074074074,\n \"acc_norm_stderr\": 0.025305906241590626\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n\ \ \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n\ \ \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n\ \ \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n\ \ \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5270935960591133,\n \"acc_stderr\": 0.03512819077876106,\n\ \ \"acc_norm\": 0.5270935960591133,\n \"acc_norm_stderr\": 0.03512819077876106\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\"\ : 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\ \ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494563,\n \"\ acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494563\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758723,\n\ \ \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758723\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6410256410256411,\n \"acc_stderr\": 0.024321738484602354,\n\ \ 
\"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.024321738484602354\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.34814814814814815,\n \"acc_stderr\": 0.029045600290616255,\n \ \ \"acc_norm\": 0.34814814814814815,\n \"acc_norm_stderr\": 0.029045600290616255\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886783,\n\ \ \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886783\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"\ acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163224,\n \"\ acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163224\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"\ acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"\ acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415927,\n \ \ \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415927\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\ \ \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n\ \ \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728742,\n\ \ \"acc_norm\": 0.7557251908396947,\n 
\"acc_norm_stderr\": 0.03768335959728742\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"\ acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n\ \ \"acc_stderr\": 0.03957835471980979,\n \"acc_norm\": 0.7870370370370371,\n\ \ \"acc_norm_stderr\": 0.03957835471980979\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n\ \ \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\ \ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\ \ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\ \ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n\ \ \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n\ \ \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n\ \ \"acc_stderr\": 0.013964393769899136,\n \"acc_norm\": 0.8122605363984674,\n\ \ \"acc_norm_stderr\": 0.013964393769899136\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.708092485549133,\n \"acc_stderr\": 0.024476994076247337,\n\ \ \"acc_norm\": 0.708092485549133,\n \"acc_norm_stderr\": 0.024476994076247337\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41899441340782123,\n\ \ 
\"acc_stderr\": 0.016501579306861677,\n \"acc_norm\": 0.41899441340782123,\n\ \ \"acc_norm_stderr\": 0.016501579306861677\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7091503267973857,\n \"acc_stderr\": 0.02600480036395213,\n\ \ \"acc_norm\": 0.7091503267973857,\n \"acc_norm_stderr\": 0.02600480036395213\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n\ \ \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n\ \ \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7006172839506173,\n \"acc_stderr\": 0.025483115601195455,\n\ \ \"acc_norm\": 0.7006172839506173,\n \"acc_norm_stderr\": 0.025483115601195455\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.4574468085106383,\n \"acc_stderr\": 0.029719281272236855,\n \ \ \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.029719281272236855\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.455019556714472,\n\ \ \"acc_stderr\": 0.012718456618701773,\n \"acc_norm\": 0.455019556714472,\n\ \ \"acc_norm_stderr\": 0.012718456618701773\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.02888819310398863,\n\ \ \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.02888819310398863\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6388888888888888,\n \"acc_stderr\": 0.01943177567703731,\n \ \ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.01943177567703731\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\ \ \"acc_stderr\": 0.04494290866252089,\n \"acc_norm\": 0.6727272727272727,\n\ \ \"acc_norm_stderr\": 0.04494290866252089\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.028666857790274648,\n\ \ \"acc_norm\": 0.7224489795918367,\n 
\"acc_norm_stderr\": 0.028666857790274648\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8507462686567164,\n\ \ \"acc_stderr\": 0.02519692987482707,\n \"acc_norm\": 0.8507462686567164,\n\ \ \"acc_norm_stderr\": 0.02519692987482707\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \ \ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n\ \ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n\ \ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.847953216374269,\n \"acc_stderr\": 0.02753912288906145,\n\ \ \"acc_norm\": 0.847953216374269,\n \"acc_norm_stderr\": 0.02753912288906145\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n\ \ \"mc1_stderr\": 0.017185611727753368,\n \"mc2\": 0.5776708582574724,\n\ \ \"mc2_stderr\": 0.01544223129155929\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7750591949486977,\n \"acc_stderr\": 0.011735043564126735\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.0041946308724832215,\n \ \ \"em_stderr\": 0.0006618716168266571,\n \"f1\": 0.09125943791946291,\n\ \ \"f1_stderr\": 0.0018243790800558358\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.18423047763457165,\n \"acc_stderr\": 0.010678414428555008\n\ \ }\n}\n```" repo_url: https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|arc:challenge|25_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-12T18-14-50.698039.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_12T18_14_50.698039 path: - 
'**/details_harness|drop|3_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T18-14-50.698039.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|gsm8k|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hellaswag|10_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet' - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - 
'**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-14-50.698039.parquet' - config_name: 
harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 
2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-14-50.698039.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-14-50.698039.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet' 
- split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-14-50.698039.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 
2023_11_12T18_14_50.698039 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T18-14-50.698039.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T18_14_50.698039 path: - '**/details_harness|winogrande|5_2023-11-12T18-14-50.698039.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T18-14-50.698039.parquet' - config_name: results data_files: - split: 2023_11_12T18_14_50.698039 path: - results_2023-11-12T18-14-50.698039.parquet - split: latest path: - results_2023-11-12T18-14-50.698039.parquet --- # Dataset Card for Evaluation run of uukuguy/speechless-mistral-six-in-one-7b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [uukuguy/speechless-mistral-six-in-one-7b](https://huggingface.co/uukuguy/speechless-mistral-six-in-one-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T18:14:50.698039](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__speechless-mistral-six-in-one-7b_public/blob/main/results_2023-11-12T18-14-50.698039.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6276350372644707, "acc_stderr": 0.03243221410411415, "acc_norm": 0.636467872903276, "acc_norm_stderr": 0.03312856166774958, "mc1": 0.40514075887392903, "mc1_stderr": 0.017185611727753368, "mc2": 0.5776708582574724, "mc2_stderr": 0.01544223129155929, "em": 0.0041946308724832215, "em_stderr": 0.0006618716168266571, "f1": 0.09125943791946291, "f1_stderr": 0.0018243790800558358 }, "harness|arc:challenge|25": { "acc": 0.5981228668941979, "acc_stderr": 0.014327268614578276, "acc_norm": 0.6296928327645052, "acc_norm_stderr": 0.01411129875167495 }, "harness|hellaswag|10": { "acc": 0.652459669388568, "acc_stderr": 0.004752158936871872, "acc_norm": 0.8460466042620992, "acc_norm_stderr": 0.00360166483871892 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316092, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 
0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.02872750295788027, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.02872750295788027 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6184971098265896, "acc_stderr": 0.03703851193099521, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.03703851193099521 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5319148936170213, "acc_stderr": 0.03261936918467382, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.03261936918467382 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6, "acc_stderr": 0.040824829046386284, "acc_norm": 0.6, "acc_norm_stderr": 0.040824829046386284 }, 
"harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4074074074074074, "acc_stderr": 0.025305906241590626, "acc_norm": 0.4074074074074074, "acc_norm_stderr": 0.025305906241590626 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5270935960591133, "acc_stderr": 0.03512819077876106, "acc_norm": 0.5270935960591133, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494563, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494563 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758723, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758723 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.024321738484602354, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.024321738484602354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34814814814814815, "acc_stderr": 0.029045600290616255, "acc_norm": 0.34814814814814815, 
"acc_norm_stderr": 0.029045600290616255 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886783, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886783 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163224, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163224 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654373, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654373 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.02765215314415927, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.02765215314415927 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728742, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728742 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.03957835471980979, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.03957835471980979 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 
0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092375, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092375 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8122605363984674, "acc_stderr": 0.013964393769899136, "acc_norm": 0.8122605363984674, "acc_norm_stderr": 0.013964393769899136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.708092485549133, "acc_stderr": 0.024476994076247337, "acc_norm": 0.708092485549133, "acc_norm_stderr": 0.024476994076247337 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.41899441340782123, "acc_stderr": 0.016501579306861677, "acc_norm": 0.41899441340782123, "acc_norm_stderr": 0.016501579306861677 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7091503267973857, "acc_stderr": 0.02600480036395213, "acc_norm": 0.7091503267973857, "acc_norm_stderr": 0.02600480036395213 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7006172839506173, "acc_stderr": 0.025483115601195455, "acc_norm": 0.7006172839506173, "acc_norm_stderr": 0.025483115601195455 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.029719281272236855, "acc_norm": 
0.4574468085106383, "acc_norm_stderr": 0.029719281272236855 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.455019556714472, "acc_stderr": 0.012718456618701773, "acc_norm": 0.455019556714472, "acc_norm_stderr": 0.012718456618701773 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6544117647058824, "acc_stderr": 0.02888819310398863, "acc_norm": 0.6544117647058824, "acc_norm_stderr": 0.02888819310398863 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6388888888888888, "acc_stderr": 0.01943177567703731, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.01943177567703731 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252089, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252089 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.028666857790274648, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.028666857790274648 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8507462686567164, "acc_stderr": 0.02519692987482707, "acc_norm": 0.8507462686567164, "acc_norm_stderr": 0.02519692987482707 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653694, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.847953216374269, "acc_stderr": 0.02753912288906145, "acc_norm": 0.847953216374269, "acc_norm_stderr": 0.02753912288906145 }, "harness|truthfulqa:mc|0": { "mc1": 0.40514075887392903, "mc1_stderr": 0.017185611727753368, "mc2": 0.5776708582574724, "mc2_stderr": 0.01544223129155929 }, "harness|winogrande|5": { "acc": 0.7750591949486977, "acc_stderr": 0.011735043564126735 }, "harness|drop|3": { "em": 0.0041946308724832215, "em_stderr": 
0.0006618716168266571, "f1": 0.09125943791946291, "f1_stderr": 0.0018243790800558358 }, "harness|gsm8k|5": { "acc": 0.18423047763457165, "acc_stderr": 0.010678414428555008 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6859762668609619, -0.8223750591278076, 0.26888182759284973, 0.24538378417491913, -0.2110406458377838, -0.09046467393636703, 0.031525325030088425, -0.2042095959186554, 0.5417664051055908, -0.008972336538136005, -0.48350515961647034, -0.686576783657074, -0.40563157200813293, 0.23036953806...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public
open-llm-leaderboard
2023-11-12T18:19:53Z
0
0
null
[ "region:us" ]
2023-11-12T18:19:53Z
2023-11-12T18:18:53.000Z
2023-11-12T18:18:53
--- pretty_name: Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [PulsarAI/SlimOpenOrca-Mistral-7B-v2](https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T18:15:51.369317](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public/blob/main/results_2023-11-12T18-15-51.369317.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6159393027066592,\n\ \ \"acc_stderr\": 0.032593338844127864,\n \"acc_norm\": 0.6242559279403389,\n\ \ \"acc_norm_stderr\": 0.03329458303258477,\n \"mc1\": 0.3929008567931457,\n\ \ \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5664808334981362,\n\ \ \"mc2_stderr\": 0.015491636686254535,\n \"em\": 0.004718959731543624,\n\ \ \"em_stderr\": 0.0007018360183131115,\n \"f1\": 0.09190750838926176,\n\ \ \"f1_stderr\": 0.0018302287340192876\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5938566552901023,\n \"acc_stderr\": 0.014351656690097858,\n\ \ \"acc_norm\": 0.628839590443686,\n \"acc_norm_stderr\": 0.014117971901142824\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6448914558852819,\n\ \ \"acc_stderr\": 0.004775681871529862,\n \"acc_norm\": 0.8340967934674368,\n\ \ \"acc_norm_stderr\": 0.003712334763856884\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \ \ \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5851851851851851,\n\ \ \"acc_stderr\": 0.04256193767901408,\n \"acc_norm\": 0.5851851851851851,\n\ \ \"acc_norm_stderr\": 0.04256193767901408\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n\ \ \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n\ \ \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \ \ \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432108,\n\ \ \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432108\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n 
\"acc\": 0.7361111111111112,\n\ \ \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n\ \ \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \ \ \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5549132947976878,\n\ \ \"acc_stderr\": 0.03789401760283648,\n \"acc_norm\": 0.5549132947976878,\n\ \ \"acc_norm_stderr\": 0.03789401760283648\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n\ \ \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n\ \ \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467381,\n\ \ \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467381\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n\ \ \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n\ \ \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482758,\n\ \ \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482758\n\ \ },\n 
\"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.41534391534391535,\n \"acc_stderr\": 0.02537952491077839,\n \"\ acc_norm\": 0.41534391534391535,\n \"acc_norm_stderr\": 0.02537952491077839\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n\ \ \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n\ \ \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n\ \ \"acc_stderr\": 0.024685979286239963,\n \"acc_norm\": 0.7483870967741936,\n\ \ \"acc_norm_stderr\": 0.024685979286239963\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n\ \ \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\"\ : 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\ \ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7575757575757576,\n \"acc_stderr\": 0.030532892233932022,\n \"\ acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.030532892233932022\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.02541634309630645,\n\ \ \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.02541634309630645\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 
0.5974358974358974,\n \"acc_stderr\": 0.02486499515976775,\n \ \ \"acc_norm\": 0.5974358974358974,\n \"acc_norm_stderr\": 0.02486499515976775\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857413,\n \ \ \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857413\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6386554621848739,\n \"acc_stderr\": 0.031204691225150016,\n\ \ \"acc_norm\": 0.6386554621848739,\n \"acc_norm_stderr\": 0.031204691225150016\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.271523178807947,\n \"acc_stderr\": 0.03631329803969653,\n \"acc_norm\"\ : 0.271523178807947,\n \"acc_norm_stderr\": 0.03631329803969653\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n\ \ \"acc_stderr\": 0.01606005626853035,\n \"acc_norm\": 0.8311926605504587,\n\ \ \"acc_norm_stderr\": 0.01606005626853035\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.034093869469927006,\n\ \ \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.034093869469927006\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"\ acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \ \ \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n\ \ \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n\ \ \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 
0.03727673575596915,\n\ \ \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7933884297520661,\n \"acc_stderr\": 0.036959801280988226,\n \"\ acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.036959801280988226\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7314814814814815,\n\ \ \"acc_stderr\": 0.042844679680521934,\n \"acc_norm\": 0.7314814814814815,\n\ \ \"acc_norm_stderr\": 0.042844679680521934\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n\ \ \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n\ \ \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n\ \ \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n\ \ \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n\ \ \"acc_stderr\": 0.023086635086841407,\n \"acc_norm\": 0.8547008547008547,\n\ \ \"acc_norm_stderr\": 0.023086635086841407\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.80970625798212,\n\ \ \"acc_stderr\": 0.014036945850381401,\n \"acc_norm\": 0.80970625798212,\n\ \ \"acc_norm_stderr\": 0.014036945850381401\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n\ \ \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n\ \ },\n 
\"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3474860335195531,\n\ \ \"acc_stderr\": 0.01592556406020815,\n \"acc_norm\": 0.3474860335195531,\n\ \ \"acc_norm_stderr\": 0.01592556406020815\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6993464052287581,\n \"acc_stderr\": 0.026256053835718964,\n\ \ \"acc_norm\": 0.6993464052287581,\n \"acc_norm_stderr\": 0.026256053835718964\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6655948553054662,\n\ \ \"acc_stderr\": 0.026795422327893937,\n \"acc_norm\": 0.6655948553054662,\n\ \ \"acc_norm_stderr\": 0.026795422327893937\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7160493827160493,\n \"acc_stderr\": 0.025089478523765134,\n\ \ \"acc_norm\": 0.7160493827160493,\n \"acc_norm_stderr\": 0.025089478523765134\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \ \ \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4530638852672751,\n\ \ \"acc_stderr\": 0.012713845972358978,\n \"acc_norm\": 0.4530638852672751,\n\ \ \"acc_norm_stderr\": 0.012713845972358978\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6066176470588235,\n \"acc_stderr\": 0.029674288281311155,\n\ \ \"acc_norm\": 0.6066176470588235,\n \"acc_norm_stderr\": 0.029674288281311155\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6421568627450981,\n \"acc_stderr\": 0.019393058402355442,\n \ \ \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.019393058402355442\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7142857142857143,\n 
\"acc_stderr\": 0.028920583220675606,\n\ \ \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675606\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n\ \ \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n\ \ \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5120481927710844,\n\ \ \"acc_stderr\": 0.03891364495835817,\n \"acc_norm\": 0.5120481927710844,\n\ \ \"acc_norm_stderr\": 0.03891364495835817\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n\ \ \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3929008567931457,\n\ \ \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5664808334981362,\n\ \ \"mc2_stderr\": 0.015491636686254535\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7758484609313339,\n \"acc_stderr\": 0.011720400740774099\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.004718959731543624,\n \ \ \"em_stderr\": 0.0007018360183131115,\n \"f1\": 0.09190750838926176,\n\ \ \"f1_stderr\": 0.0018302287340192876\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.18953752843062927,\n \"acc_stderr\": 0.010795837931896387\n\ \ }\n}\n```" repo_url: https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|arc:challenge|25_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-12T18-15-51.369317.parquet' - 
config_name: harness_drop_3 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|drop|3_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T18-15-51.369317.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|gsm8k|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hellaswag|10_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet' - 
'**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet' - 
'**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet' - 
'**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet' - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet' - 
'**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 
data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet' - 
split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 
2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-world_religions|5_2023-11-12T18-15-51.369317.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T18-15-51.369317.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T18_15_51.369317 path: - '**/details_harness|winogrande|5_2023-11-12T18-15-51.369317.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T18-15-51.369317.parquet' - config_name: results data_files: - split: 2023_11_12T18_15_51.369317 path: - results_2023-11-12T18-15-51.369317.parquet - split: latest path: - results_2023-11-12T18-15-51.369317.parquet --- # Dataset Card for Evaluation run of PulsarAI/SlimOpenOrca-Mistral-7B-v2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/SlimOpenOrca-Mistral-7B-v2](https://huggingface.co/PulsarAI/SlimOpenOrca-Mistral-7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T18:15:51.369317](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__SlimOpenOrca-Mistral-7B-v2_public/blob/main/results_2023-11-12T18-15-51.369317.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6159393027066592, "acc_stderr": 0.032593338844127864, "acc_norm": 0.6242559279403389, "acc_norm_stderr": 0.03329458303258477, "mc1": 0.3929008567931457, "mc1_stderr": 0.017097248285233065, "mc2": 0.5664808334981362, "mc2_stderr": 0.015491636686254535, "em": 0.004718959731543624, "em_stderr": 0.0007018360183131115, "f1": 0.09190750838926176, "f1_stderr": 0.0018302287340192876 }, "harness|arc:challenge|25": { "acc": 0.5938566552901023, "acc_stderr": 0.014351656690097858, "acc_norm": 0.628839590443686, "acc_norm_stderr": 0.014117971901142824 }, "harness|hellaswag|10": { "acc": 0.6448914558852819, "acc_stderr": 0.004775681871529862, "acc_norm": 0.8340967934674368, "acc_norm_stderr": 0.003712334763856884 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5851851851851851, "acc_stderr": 0.04256193767901408, "acc_norm": 0.5851851851851851, "acc_norm_stderr": 0.04256193767901408 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 
0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.028815615713432108, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.028815615713432108 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5549132947976878, "acc_stderr": 0.03789401760283648, "acc_norm": 0.5549132947976878, "acc_norm_stderr": 0.03789401760283648 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5319148936170213, "acc_stderr": 0.03261936918467381, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.03261936918467381 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482758, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482758 }, 
"harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41534391534391535, "acc_stderr": 0.02537952491077839, "acc_norm": 0.41534391534391535, "acc_norm_stderr": 0.02537952491077839 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7483870967741936, "acc_stderr": 0.024685979286239963, "acc_norm": 0.7483870967741936, "acc_norm_stderr": 0.024685979286239963 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7575757575757576, "acc_stderr": 0.030532892233932022, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.030532892233932022 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.02541634309630645, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.02541634309630645 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5974358974358974, "acc_stderr": 0.02486499515976775, "acc_norm": 0.5974358974358974, "acc_norm_stderr": 0.02486499515976775 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857413, "acc_norm": 0.35555555555555557, 
"acc_norm_stderr": 0.029185714949857413 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6386554621848739, "acc_stderr": 0.031204691225150016, "acc_norm": 0.6386554621848739, "acc_norm_stderr": 0.031204691225150016 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.271523178807947, "acc_stderr": 0.03631329803969653, "acc_norm": 0.271523178807947, "acc_norm_stderr": 0.03631329803969653 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8311926605504587, "acc_stderr": 0.01606005626853035, "acc_norm": 0.8311926605504587, "acc_norm_stderr": 0.01606005626853035 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49074074074074076, "acc_stderr": 0.034093869469927006, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.036959801280988226, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.036959801280988226 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7314814814814815, "acc_stderr": 0.042844679680521934, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.042844679680521934 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 
0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841407, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841407 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.80970625798212, "acc_stderr": 0.014036945850381401, "acc_norm": 0.80970625798212, "acc_norm_stderr": 0.014036945850381401 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6878612716763006, "acc_stderr": 0.024946792225272314, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.024946792225272314 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3474860335195531, "acc_stderr": 0.01592556406020815, "acc_norm": 0.3474860335195531, "acc_norm_stderr": 0.01592556406020815 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6993464052287581, "acc_stderr": 0.026256053835718964, "acc_norm": 0.6993464052287581, "acc_norm_stderr": 0.026256053835718964 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6655948553054662, "acc_stderr": 0.026795422327893937, "acc_norm": 0.6655948553054662, "acc_norm_stderr": 0.026795422327893937 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7160493827160493, "acc_stderr": 0.025089478523765134, "acc_norm": 0.7160493827160493, "acc_norm_stderr": 0.025089478523765134 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.43617021276595747, "acc_stderr": 0.02958345203628407, "acc_norm": 
0.43617021276595747, "acc_norm_stderr": 0.02958345203628407 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4530638852672751, "acc_stderr": 0.012713845972358978, "acc_norm": 0.4530638852672751, "acc_norm_stderr": 0.012713845972358978 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6066176470588235, "acc_stderr": 0.029674288281311155, "acc_norm": 0.6066176470588235, "acc_norm_stderr": 0.029674288281311155 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6421568627450981, "acc_stderr": 0.019393058402355442, "acc_norm": 0.6421568627450981, "acc_norm_stderr": 0.019393058402355442 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.028920583220675606, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.028920583220675606 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.5120481927710844, "acc_stderr": 0.03891364495835817, "acc_norm": 0.5120481927710844, "acc_norm_stderr": 0.03891364495835817 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.3929008567931457, "mc1_stderr": 0.017097248285233065, "mc2": 0.5664808334981362, "mc2_stderr": 0.015491636686254535 }, "harness|winogrande|5": { "acc": 0.7758484609313339, "acc_stderr": 0.011720400740774099 }, "harness|drop|3": { "em": 0.004718959731543624, "em_stderr": 
0.0007018360183131115, "f1": 0.09190750838926176, "f1_stderr": 0.0018302287340192876 }, "harness|gsm8k|5": { "acc": 0.18953752843062927, "acc_stderr": 0.010795837931896387 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7197391390800476, -0.8381004929542542, 0.27162399888038635, 0.24606624245643616, -0.16360437870025635, -0.08818823099136353, 0.00825541466474533, -0.19388625025749207, 0.5599177479743958, -0.023972297087311745, -0.48173925280570984, -0.681281566619873, -0.4343359172344208, 0.25488474965...
null
null
null
null
null
null
null
null
null
null
null
null
null
hoanganhknk/zalo
hoanganhknk
2023-11-12T19:13:41Z
0
0
null
[ "region:us" ]
2023-11-12T19:13:41Z
2023-11-12T18:36:31.000Z
2023-11-12T18:36:31
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: temp dtype: string splits: - name: train num_bytes: 71711.0 num_examples: 1 download_size: 72626 dataset_size: 71711.0 --- # Dataset Card for "zalo" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.5622854232788086, -0.16898906230926514, 0.21291857957839966, 0.11394908279180527, -0.16081315279006958, 0.019575851038098335, 0.32902324199676514, -0.2791028320789337, 0.8802366256713867, 0.511104941368103, -0.9122817516326904, -0.7753266096115112, -0.5347784161567688, -0.18530137836933...
null
null
null
null
null
null
null
null
null
null
null
null
null
slangdawang/your-dataset-name
slangdawang
2023-11-12T19:30:05Z
0
0
null
[ "region:us" ]
2023-11-12T19:30:05Z
2023-11-12T19:28:34.000Z
2023-11-12T19:28:34
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
xwar/ninox_dataset
xwar
2023-11-12T19:46:37Z
0
0
null
[ "region:us" ]
2023-11-12T19:46:37Z
2023-11-12T19:45:41.000Z
2023-11-12T19:45:41
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public
open-llm-leaderboard
2023-11-12T19:48:46Z
0
0
null
[ "region:us" ]
2023-11-12T19:48:46Z
2023-11-12T19:47:45.000Z
2023-11-12T19:47:45
--- pretty_name: Evaluation run of Expert68/llama2_13b_instructed_version2 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Expert68/llama2_13b_instructed_version2](https://huggingface.co/Expert68/llama2_13b_instructed_version2)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T19:44:39.658427](https://huggingface.co/datasets/open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public/blob/main/results_2023-11-12T19-44-39.658427.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5535385938067054,\n\ \ \"acc_stderr\": 0.03382379046360409,\n \"acc_norm\": 0.5616374813808622,\n\ \ \"acc_norm_stderr\": 0.034597480068222046,\n \"mc1\": 0.31456548347613217,\n\ \ \"mc1_stderr\": 0.01625524199317918,\n \"mc2\": 0.46118545589659976,\n\ \ \"mc2_stderr\": 0.015483508114692393,\n \"em\": 0.007340604026845637,\n\ \ \"em_stderr\": 0.0008741896875345934,\n \"f1\": 0.07567323825503336,\n\ \ \"f1_stderr\": 0.0016747744191590948\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5631399317406144,\n \"acc_stderr\": 0.014494421584256519,\n\ \ \"acc_norm\": 0.6006825938566553,\n \"acc_norm_stderr\": 0.014312094557946705\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6412069308902609,\n\ \ \"acc_stderr\": 0.004786660691181909,\n \"acc_norm\": 0.8404700258912567,\n\ \ \"acc_norm_stderr\": 0.003654212329516619\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n\ \ \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.4740740740740741,\n\ \ \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5460526315789473,\n \"acc_stderr\": 0.04051646342874142,\n\ \ \"acc_norm\": 0.5460526315789473,\n \"acc_norm_stderr\": 0.04051646342874142\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n\ \ \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n 
\"acc\": 0.625,\n\ \ \"acc_stderr\": 0.04048439222695598,\n \"acc_norm\": 0.625,\n \ \ \"acc_norm_stderr\": 0.04048439222695598\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n\ \ \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n\ \ \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n\ \ \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.04280105837364397,\n\ \ \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.04280105837364397\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n\ \ \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.451063829787234,\n \"acc_stderr\": 0.032529096196131965,\n\ \ \"acc_norm\": 0.451063829787234,\n \"acc_norm_stderr\": 0.032529096196131965\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.041546596717075474,\n\ \ \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.041546596717075474\n\ \ },\n 
\"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.32275132275132273,\n \"acc_stderr\": 0.024078943243597016,\n \"\ acc_norm\": 0.32275132275132273,\n \"acc_norm_stderr\": 0.024078943243597016\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n\ \ \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n\ \ \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \ \ \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n\ \ \"acc_stderr\": 0.02716253782694846,\n \"acc_norm\": 0.6483870967741936,\n\ \ \"acc_norm_stderr\": 0.02716253782694846\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.43842364532019706,\n \"acc_stderr\": 0.03491207857486518,\n\ \ \"acc_norm\": 0.43842364532019706,\n \"acc_norm_stderr\": 0.03491207857486518\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\ : 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.696969696969697,\n \"acc_stderr\": 0.03588624800091707,\n\ \ \"acc_norm\": 0.696969696969697,\n \"acc_norm_stderr\": 0.03588624800091707\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.6717171717171717,\n \"acc_stderr\": 0.03345678422756776,\n \"\ acc_norm\": 0.6717171717171717,\n \"acc_norm_stderr\": 0.03345678422756776\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.027493504244548057,\n\ \ \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.027493504244548057\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 
0.5153846153846153,\n \"acc_stderr\": 0.025339003010106515,\n\ \ \"acc_norm\": 0.5153846153846153,\n \"acc_norm_stderr\": 0.025339003010106515\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.26666666666666666,\n \"acc_stderr\": 0.026962424325073835,\n \ \ \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.026962424325073835\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.592436974789916,\n \"acc_stderr\": 0.031918633744784645,\n \ \ \"acc_norm\": 0.592436974789916,\n \"acc_norm_stderr\": 0.031918633744784645\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"\ acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.728440366972477,\n \"acc_stderr\": 0.019069098363191428,\n \"\ acc_norm\": 0.728440366972477,\n \"acc_norm_stderr\": 0.019069098363191428\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.42592592592592593,\n \"acc_stderr\": 0.03372343271653064,\n \"\ acc_norm\": 0.42592592592592593,\n \"acc_norm_stderr\": 0.03372343271653064\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7745098039215687,\n \"acc_stderr\": 0.02933116229425174,\n \"\ acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.02933116229425174\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7383966244725738,\n \"acc_stderr\": 0.028609516716994934,\n \ \ \"acc_norm\": 0.7383966244725738,\n \"acc_norm_stderr\": 0.028609516716994934\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n\ \ \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n\ \ \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.5801526717557252,\n \"acc_stderr\": 
0.04328577215262971,\n\ \ \"acc_norm\": 0.5801526717557252,\n \"acc_norm_stderr\": 0.04328577215262971\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7107438016528925,\n \"acc_stderr\": 0.041391127276354626,\n \"\ acc_norm\": 0.7107438016528925,\n \"acc_norm_stderr\": 0.041391127276354626\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n\ \ \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n\ \ \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n\ \ \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.39285714285714285,\n\ \ \"acc_stderr\": 0.04635550135609976,\n \"acc_norm\": 0.39285714285714285,\n\ \ \"acc_norm_stderr\": 0.04635550135609976\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7184466019417476,\n \"acc_stderr\": 0.04453254836326467,\n\ \ \"acc_norm\": 0.7184466019417476,\n \"acc_norm_stderr\": 0.04453254836326467\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n\ \ \"acc_stderr\": 0.025140935950335442,\n \"acc_norm\": 0.8205128205128205,\n\ \ \"acc_norm_stderr\": 0.025140935950335442\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \ \ \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7637292464878672,\n\ \ \"acc_stderr\": 0.01519047371703751,\n \"acc_norm\": 0.7637292464878672,\n\ \ \"acc_norm_stderr\": 0.01519047371703751\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.02607431485165708,\n\ \ \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.02607431485165708\n\ \ },\n 
\"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4011173184357542,\n\ \ \"acc_stderr\": 0.01639222189940707,\n \"acc_norm\": 0.4011173184357542,\n\ \ \"acc_norm_stderr\": 0.01639222189940707\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5915032679738562,\n \"acc_stderr\": 0.028146405993096358,\n\ \ \"acc_norm\": 0.5915032679738562,\n \"acc_norm_stderr\": 0.028146405993096358\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n\ \ \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n\ \ \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6604938271604939,\n \"acc_stderr\": 0.026348564412011624,\n\ \ \"acc_norm\": 0.6604938271604939,\n \"acc_norm_stderr\": 0.026348564412011624\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.43617021276595747,\n \"acc_stderr\": 0.02958345203628407,\n \ \ \"acc_norm\": 0.43617021276595747,\n \"acc_norm_stderr\": 0.02958345203628407\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4380704041720991,\n\ \ \"acc_stderr\": 0.01267190278256765,\n \"acc_norm\": 0.4380704041720991,\n\ \ \"acc_norm_stderr\": 0.01267190278256765\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5330882352941176,\n \"acc_stderr\": 0.03030625772246831,\n\ \ \"acc_norm\": 0.5330882352941176,\n \"acc_norm_stderr\": 0.03030625772246831\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5751633986928104,\n \"acc_stderr\": 0.01999797303545833,\n \ \ \"acc_norm\": 0.5751633986928104,\n \"acc_norm_stderr\": 0.01999797303545833\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6,\n \"acc_stderr\": 
0.03136250240935893,\n \ \ \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03136250240935893\n },\n\ \ \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7164179104477612,\n\ \ \"acc_stderr\": 0.031871875379197966,\n \"acc_norm\": 0.7164179104477612,\n\ \ \"acc_norm_stderr\": 0.031871875379197966\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n\ \ \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n\ \ \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n\ \ \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.31456548347613217,\n\ \ \"mc1_stderr\": 0.01625524199317918,\n \"mc2\": 0.46118545589659976,\n\ \ \"mc2_stderr\": 0.015483508114692393\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7561168113654302,\n \"acc_stderr\": 0.012068923278908194\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.007340604026845637,\n \ \ \"em_stderr\": 0.0008741896875345934,\n \"f1\": 0.07567323825503336,\n\ \ \"f1_stderr\": 0.0016747744191590948\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.10993176648976498,\n \"acc_stderr\": 0.008616195587865397\n\ \ }\n}\n```" repo_url: https://huggingface.co/Expert68/llama2_13b_instructed_version2 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|arc:challenge|25_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-12T19-44-39.658427.parquet' - config_name: harness_drop_3 
data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|drop|3_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T19-44-39.658427.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|gsm8k|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hellaswag|10_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet' - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - 
'**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-44-39.658427.parquet' - config_name: 
harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 
2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-44-39.658427.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-44-39.658427.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet' 
- split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-44-39.658427.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 
2023_11_12T19_44_39.658427 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T19-44-39.658427.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T19_44_39.658427 path: - '**/details_harness|winogrande|5_2023-11-12T19-44-39.658427.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T19-44-39.658427.parquet' - config_name: results data_files: - split: 2023_11_12T19_44_39.658427 path: - results_2023-11-12T19-44-39.658427.parquet - split: latest path: - results_2023-11-12T19-44-39.658427.parquet --- # Dataset Card for Evaluation run of Expert68/llama2_13b_instructed_version2 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/Expert68/llama2_13b_instructed_version2 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [Expert68/llama2_13b_instructed_version2](https://huggingface.co/Expert68/llama2_13b_instructed_version2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T19:44:39.658427](https://huggingface.co/datasets/open-llm-leaderboard/details_Expert68__llama2_13b_instructed_version2_public/blob/main/results_2023-11-12T19-44-39.658427.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5535385938067054, "acc_stderr": 0.03382379046360409, "acc_norm": 0.5616374813808622, "acc_norm_stderr": 0.034597480068222046, "mc1": 0.31456548347613217, "mc1_stderr": 0.01625524199317918, "mc2": 0.46118545589659976, "mc2_stderr": 0.015483508114692393, "em": 0.007340604026845637, "em_stderr": 0.0008741896875345934, "f1": 0.07567323825503336, "f1_stderr": 0.0016747744191590948 }, "harness|arc:challenge|25": { "acc": 0.5631399317406144, "acc_stderr": 0.014494421584256519, "acc_norm": 0.6006825938566553, "acc_norm_stderr": 0.014312094557946705 }, "harness|hellaswag|10": { "acc": 0.6412069308902609, "acc_stderr": 0.004786660691181909, "acc_norm": 0.8404700258912567, "acc_norm_stderr": 0.003654212329516619 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750574, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5460526315789473, "acc_stderr": 0.04051646342874142, "acc_norm": 0.5460526315789473, "acc_norm_stderr": 0.04051646342874142 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 
0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.625, "acc_stderr": 0.04048439222695598, "acc_norm": 0.625, "acc_norm_stderr": 0.04048439222695598 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.04280105837364397, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.04280105837364397 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.451063829787234, "acc_stderr": 0.032529096196131965, "acc_norm": 0.451063829787234, "acc_norm_stderr": 0.032529096196131965 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.041546596717075474, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.041546596717075474 }, 
"harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.32275132275132273, "acc_stderr": 0.024078943243597016, "acc_norm": 0.32275132275132273, "acc_norm_stderr": 0.024078943243597016 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6483870967741936, "acc_stderr": 0.02716253782694846, "acc_norm": 0.6483870967741936, "acc_norm_stderr": 0.02716253782694846 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.43842364532019706, "acc_stderr": 0.03491207857486518, "acc_norm": 0.43842364532019706, "acc_norm_stderr": 0.03491207857486518 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.696969696969697, "acc_stderr": 0.03588624800091707, "acc_norm": 0.696969696969697, "acc_norm_stderr": 0.03588624800091707 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.6717171717171717, "acc_stderr": 0.03345678422756776, "acc_norm": 0.6717171717171717, "acc_norm_stderr": 0.03345678422756776 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.027493504244548057, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.027493504244548057 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5153846153846153, "acc_stderr": 0.025339003010106515, "acc_norm": 0.5153846153846153, "acc_norm_stderr": 0.025339003010106515 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26666666666666666, "acc_stderr": 0.026962424325073835, "acc_norm": 0.26666666666666666, 
"acc_norm_stderr": 0.026962424325073835 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.592436974789916, "acc_stderr": 0.031918633744784645, "acc_norm": 0.592436974789916, "acc_norm_stderr": 0.031918633744784645 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.728440366972477, "acc_stderr": 0.019069098363191428, "acc_norm": 0.728440366972477, "acc_norm_stderr": 0.019069098363191428 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.42592592592592593, "acc_stderr": 0.03372343271653064, "acc_norm": 0.42592592592592593, "acc_norm_stderr": 0.03372343271653064 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.02933116229425174, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.02933116229425174 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7383966244725738, "acc_stderr": 0.028609516716994934, "acc_norm": 0.7383966244725738, "acc_norm_stderr": 0.028609516716994934 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.031381476375754995, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.031381476375754995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5801526717557252, "acc_stderr": 0.04328577215262971, "acc_norm": 0.5801526717557252, "acc_norm_stderr": 0.04328577215262971 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7107438016528925, "acc_stderr": 0.041391127276354626, "acc_norm": 0.7107438016528925, "acc_norm_stderr": 0.041391127276354626 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6687116564417178, "acc_stderr": 
0.03697983910025588, "acc_norm": 0.6687116564417178, "acc_norm_stderr": 0.03697983910025588 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.39285714285714285, "acc_stderr": 0.04635550135609976, "acc_norm": 0.39285714285714285, "acc_norm_stderr": 0.04635550135609976 }, "harness|hendrycksTest-management|5": { "acc": 0.7184466019417476, "acc_stderr": 0.04453254836326467, "acc_norm": 0.7184466019417476, "acc_norm_stderr": 0.04453254836326467 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.025140935950335442, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.025140935950335442 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7637292464878672, "acc_stderr": 0.01519047371703751, "acc_norm": 0.7637292464878672, "acc_norm_stderr": 0.01519047371703751 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6242774566473989, "acc_stderr": 0.02607431485165708, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.02607431485165708 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4011173184357542, "acc_stderr": 0.01639222189940707, "acc_norm": 0.4011173184357542, "acc_norm_stderr": 0.01639222189940707 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5915032679738562, "acc_stderr": 0.028146405993096358, "acc_norm": 0.5915032679738562, "acc_norm_stderr": 0.028146405993096358 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6527331189710611, "acc_stderr": 0.027040745502307336, "acc_norm": 0.6527331189710611, "acc_norm_stderr": 0.027040745502307336 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6604938271604939, "acc_stderr": 0.026348564412011624, "acc_norm": 0.6604938271604939, "acc_norm_stderr": 0.026348564412011624 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.43617021276595747, "acc_stderr": 0.02958345203628407, "acc_norm": 
0.43617021276595747, "acc_norm_stderr": 0.02958345203628407 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4380704041720991, "acc_stderr": 0.01267190278256765, "acc_norm": 0.4380704041720991, "acc_norm_stderr": 0.01267190278256765 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5330882352941176, "acc_stderr": 0.03030625772246831, "acc_norm": 0.5330882352941176, "acc_norm_stderr": 0.03030625772246831 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5751633986928104, "acc_stderr": 0.01999797303545833, "acc_norm": 0.5751633986928104, "acc_norm_stderr": 0.01999797303545833 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6, "acc_stderr": 0.03136250240935893, "acc_norm": 0.6, "acc_norm_stderr": 0.03136250240935893 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7164179104477612, "acc_stderr": 0.031871875379197966, "acc_norm": 0.7164179104477612, "acc_norm_stderr": 0.031871875379197966 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.31456548347613217, "mc1_stderr": 0.01625524199317918, "mc2": 0.46118545589659976, "mc2_stderr": 0.015483508114692393 }, "harness|winogrande|5": { "acc": 0.7561168113654302, "acc_stderr": 0.012068923278908194 }, "harness|drop|3": { "em": 0.007340604026845637, "em_stderr": 0.0008741896875345934, "f1": 
0.07567323825503336, "f1_stderr": 0.0016747744191590948 }, "harness|gsm8k|5": { "acc": 0.10993176648976498, "acc_stderr": 0.008616195587865397 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6978379487991333, -0.8492630124092102, 0.2816261351108551, 0.22496838867664337, -0.19666212797164917, -0.048574719578027725, 0.0173408892005682, -0.26136597990989685, 0.577582061290741, -0.03675682842731476, -0.48350533843040466, -0.6708272695541382, -0.45188596844673157, 0.246452569961...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public
open-llm-leaderboard
2023-11-12T19:58:59Z
0
0
null
[ "region:us" ]
2023-11-12T19:58:59Z
2023-11-12T19:57:59.000Z
2023-11-12T19:57:59
--- pretty_name: Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [NeverSleep/Mistral-11B-SynthIAirOmniMix](https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T19:54:58.939194](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public/blob/main/results_2023-11-12T19-54-58.939194.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6277127436205546,\n\ \ \"acc_stderr\": 0.03243061765974366,\n \"acc_norm\": 0.6378229900253635,\n\ \ \"acc_norm_stderr\": 0.03315507636067878,\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5568818997417452,\n\ \ \"mc2_stderr\": 0.015517245006607807,\n \"em\": 0.23259228187919462,\n\ \ \"em_stderr\": 0.004326636227794088,\n \"f1\": 0.28881291946308657,\n\ \ \"f1_stderr\": 0.004306419385994737\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5921501706484642,\n \"acc_stderr\": 0.014361097288449705,\n\ \ \"acc_norm\": 0.6245733788395904,\n \"acc_norm_stderr\": 0.014150631435111728\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6396136227843059,\n\ \ \"acc_stderr\": 0.004791313101877047,\n \"acc_norm\": 0.8313085042820155,\n\ \ \"acc_norm_stderr\": 0.003737138752336941\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411022,\n \ \ \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411022\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n\ \ \"acc_stderr\": 0.0421850621536888,\n \"acc_norm\": 0.6074074074074074,\n\ \ \"acc_norm_stderr\": 0.0421850621536888\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.625,\n \"acc_stderr\": 0.039397364351956274,\n \ \ \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.039397364351956274\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n\ \ \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432115,\n\ \ \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432115\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n\ \ 
\"acc_stderr\": 0.03745554791462456,\n \"acc_norm\": 0.7222222222222222,\n\ \ \"acc_norm_stderr\": 0.03745554791462456\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \ \ \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\ : 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237101,\n \ \ \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237101\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.630057803468208,\n\ \ \"acc_stderr\": 0.0368122963339432,\n \"acc_norm\": 0.630057803468208,\n\ \ \"acc_norm_stderr\": 0.0368122963339432\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n\ \ \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n\ \ \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n\ \ \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n\ \ \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n\ \ \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n\ \ \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ 
: 0.3888888888888889,\n \"acc_stderr\": 0.025107425481137282,\n \"\ acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.025107425481137282\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n\ \ \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n\ \ \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n\ \ \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n\ \ \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n\ \ \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\"\ : 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n\ \ \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8080808080808081,\n \"acc_stderr\": 0.02805779167298901,\n \"\ acc_norm\": 0.8080808080808081,\n \"acc_norm_stderr\": 0.02805779167298901\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.02381447708659355,\n\ \ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.02381447708659355\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \ \ \"acc_norm\": 
0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \ \ \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.03017680828897434,\n \ \ \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.03017680828897434\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.304635761589404,\n \"acc_stderr\": 0.03757949922943343,\n \"acc_norm\"\ : 0.304635761589404,\n \"acc_norm_stderr\": 0.03757949922943343\n },\n\ \ \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8220183486238533,\n\ \ \"acc_stderr\": 0.01639943636661292,\n \"acc_norm\": 0.8220183486238533,\n\ \ \"acc_norm_stderr\": 0.01639943636661292\n },\n \"harness|hendrycksTest-high_school_statistics|5\"\ : {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n\ \ \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"\ acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7890295358649789,\n \"acc_stderr\": 0.02655837250266192,\n \ \ \"acc_norm\": 0.7890295358649789,\n \"acc_norm_stderr\": 0.02655837250266192\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n\ \ \"acc_stderr\": 0.03050028317654585,\n \"acc_norm\": 0.7085201793721974,\n\ \ \"acc_norm_stderr\": 0.03050028317654585\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.0364129708131373,\n\ \ \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 
0.0364129708131373\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228732,\n \"\ acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228732\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n\ \ \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n\ \ \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.0335195387952127,\n\ \ \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.0335195387952127\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n\ \ \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n\ \ \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.03989139859531771,\n\ \ \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.03989139859531771\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n\ \ \"acc_stderr\": 0.022509033937077816,\n \"acc_norm\": 0.8632478632478633,\n\ \ \"acc_norm_stderr\": 0.022509033937077816\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8160919540229885,\n\ \ \"acc_stderr\": 0.01385372417092253,\n \"acc_norm\": 0.8160919540229885,\n\ \ \"acc_norm_stderr\": 0.01385372417092253\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.025070713719153186,\n\ \ \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.025070713719153186\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37318435754189944,\n\ \ \"acc_stderr\": 
0.016175692013381968,\n \"acc_norm\": 0.37318435754189944,\n\ \ \"acc_norm_stderr\": 0.016175692013381968\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.0256468630971379,\n\ \ \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.0256468630971379\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n\ \ \"acc_stderr\": 0.026385273703464482,\n \"acc_norm\": 0.684887459807074,\n\ \ \"acc_norm_stderr\": 0.026385273703464482\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n\ \ \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \ \ \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.45436766623207303,\n\ \ \"acc_stderr\": 0.012716941720734804,\n \"acc_norm\": 0.45436766623207303,\n\ \ \"acc_norm_stderr\": 0.012716941720734804\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.028245687391462927,\n\ \ \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.028245687391462927\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6503267973856209,\n \"acc_stderr\": 0.01929196189506638,\n \ \ \"acc_norm\": 0.6503267973856209,\n \"acc_norm_stderr\": 0.01929196189506638\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n\ \ \"acc_stderr\": 0.04494290866252091,\n \"acc_norm\": 0.6727272727272727,\n\ \ \"acc_norm_stderr\": 0.04494290866252091\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n\ \ \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 
0.02812342933514278\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n\ \ \"acc_stderr\": 0.025538433368578334,\n \"acc_norm\": 0.845771144278607,\n\ \ \"acc_norm_stderr\": 0.025538433368578334\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.83,\n \"acc_stderr\": 0.0377525168068637,\n \ \ \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.0377525168068637\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\ \ \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n\ \ \"acc_norm_stderr\": 0.038695433234721015\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727668,\n\ \ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727668\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3880048959608323,\n\ \ \"mc1_stderr\": 0.017058761501347972,\n \"mc2\": 0.5568818997417452,\n\ \ \"mc2_stderr\": 0.015517245006607807\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7640094711917916,\n \"acc_stderr\": 0.011933828850275626\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.23259228187919462,\n \ \ \"em_stderr\": 0.004326636227794088,\n \"f1\": 0.28881291946308657,\n\ \ \"f1_stderr\": 0.004306419385994737\n },\n \"harness|gsm8k|5\": {\n\ \ \"acc\": 0.11902956785443518,\n \"acc_stderr\": 0.00891970291116164\n\ \ }\n}\n```" repo_url: https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|arc:challenge|25_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-12T19-54-58.939194.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_12T19_54_58.939194 path: - 
'**/details_harness|drop|3_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T19-54-58.939194.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|gsm8k|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hellaswag|10_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet' - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - 
'**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T19-54-58.939194.parquet' - config_name: 
harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 
2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T19-54-58.939194.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T19-54-58.939194.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet' 
- split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T19-54-58.939194.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 
2023_11_12T19_54_58.939194 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T19-54-58.939194.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T19_54_58.939194 path: - '**/details_harness|winogrande|5_2023-11-12T19-54-58.939194.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T19-54-58.939194.parquet' - config_name: results data_files: - split: 2023_11_12T19_54_58.939194 path: - results_2023-11-12T19-54-58.939194.parquet - split: latest path: - results_2023-11-12T19-54-58.939194.parquet --- # Dataset Card for Evaluation run of NeverSleep/Mistral-11B-SynthIAirOmniMix ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [NeverSleep/Mistral-11B-SynthIAirOmniMix](https://huggingface.co/NeverSleep/Mistral-11B-SynthIAirOmniMix) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T19:54:58.939194](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Mistral-11B-SynthIAirOmniMix_public/blob/main/results_2023-11-12T19-54-58.939194.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6277127436205546, "acc_stderr": 0.03243061765974366, "acc_norm": 0.6378229900253635, "acc_norm_stderr": 0.03315507636067878, "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5568818997417452, "mc2_stderr": 0.015517245006607807, "em": 0.23259228187919462, "em_stderr": 0.004326636227794088, "f1": 0.28881291946308657, "f1_stderr": 0.004306419385994737 }, "harness|arc:challenge|25": { "acc": 0.5921501706484642, "acc_stderr": 0.014361097288449705, "acc_norm": 0.6245733788395904, "acc_norm_stderr": 0.014150631435111728 }, "harness|hellaswag|10": { "acc": 0.6396136227843059, "acc_stderr": 0.004791313101877047, "acc_norm": 0.8313085042820155, "acc_norm_stderr": 0.003737138752336941 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411022, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411022 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.0421850621536888, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.0421850621536888 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.625, "acc_stderr": 0.039397364351956274, "acc_norm": 0.625, "acc_norm_stderr": 0.039397364351956274 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, 
"acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.028815615713432115, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.028815615713432115 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7222222222222222, "acc_stderr": 0.03745554791462456, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.03745554791462456 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237101, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237101 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.630057803468208, "acc_stderr": 0.0368122963339432, "acc_norm": 0.630057803468208, "acc_norm_stderr": 0.0368122963339432 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, 
"harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.025107425481137282, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.025107425481137282 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8080808080808081, "acc_stderr": 0.02805779167298901, "acc_norm": 0.8080808080808081, "acc_norm_stderr": 0.02805779167298901 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8756476683937824, "acc_stderr": 0.02381447708659355, "acc_norm": 0.8756476683937824, "acc_norm_stderr": 0.02381447708659355 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 
0.028742040903948492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.03017680828897434, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.03017680828897434 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.03757949922943343, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.03757949922943343 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8220183486238533, "acc_stderr": 0.01639943636661292, "acc_norm": 0.8220183486238533, "acc_norm_stderr": 0.01639943636661292 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7890295358649789, "acc_stderr": 0.02655837250266192, "acc_norm": 0.7890295358649789, "acc_norm_stderr": 0.02655837250266192 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7085201793721974, "acc_stderr": 0.03050028317654585, "acc_norm": 0.7085201793721974, "acc_norm_stderr": 0.03050028317654585 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.0364129708131373, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.0364129708131373 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228732, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228732 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.0335195387952127, "acc_norm": 
0.7607361963190185, "acc_norm_stderr": 0.0335195387952127 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.03989139859531771, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.03989139859531771 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077816, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077816 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8160919540229885, "acc_stderr": 0.01385372417092253, "acc_norm": 0.8160919540229885, "acc_norm_stderr": 0.01385372417092253 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6820809248554913, "acc_stderr": 0.025070713719153186, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.025070713719153186 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37318435754189944, "acc_stderr": 0.016175692013381968, "acc_norm": 0.37318435754189944, "acc_norm_stderr": 0.016175692013381968 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.0256468630971379, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.0256468630971379 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.684887459807074, "acc_stderr": 0.026385273703464482, "acc_norm": 0.684887459807074, "acc_norm_stderr": 0.026385273703464482 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.024836057868294677, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.024836057868294677 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, "acc_norm": 0.48226950354609927, "acc_norm_stderr": 
0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.45436766623207303, "acc_stderr": 0.012716941720734804, "acc_norm": 0.45436766623207303, "acc_norm_stderr": 0.012716941720734804 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.028245687391462927, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.028245687391462927 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6503267973856209, "acc_stderr": 0.01929196189506638, "acc_norm": 0.6503267973856209, "acc_norm_stderr": 0.01929196189506638 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.04494290866252091, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.04494290866252091 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.02812342933514278, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578334, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578334 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.0377525168068637, "acc_norm": 0.83, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727668, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727668 }, "harness|truthfulqa:mc|0": { "mc1": 0.3880048959608323, "mc1_stderr": 0.017058761501347972, "mc2": 0.5568818997417452, "mc2_stderr": 0.015517245006607807 }, "harness|winogrande|5": { "acc": 0.7640094711917916, "acc_stderr": 0.011933828850275626 }, "harness|drop|3": { "em": 0.23259228187919462, "em_stderr": 0.004326636227794088, "f1": 0.28881291946308657, 
"f1_stderr": 0.004306419385994737 }, "harness|gsm8k|5": { "acc": 0.11902956785443518, "acc_stderr": 0.00891970291116164 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.6985575556755066, -0.8327271342277527, 0.3026367425918579, 0.2040702849626541, -0.17065204679965973, -0.044291265308856964, 0.04547892138361931, -0.23741139471530914, 0.5692649483680725, -0.022399526089429855, -0.5054463148117065, -0.7347602844238281, -0.4416039288043976, 0.232695698738...
null
null
null
null
null
null
null
null
null
null
null
null
null
Kaue123456/ZeLuizPatati
Kaue123456
2023-11-12T19:59:18Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T19:59:18Z
2023-11-12T19:58:24.000Z
2023-11-12T19:58:24
--- license: openrail ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
Capsekai/Uracon
Capsekai
2023-11-12T21:36:14Z
0
0
null
[ "task_categories:text-classification", "size_categories:1K<n<10K", "language:en", "license:creativeml-openrail-m", "art", "region:us" ]
2023-11-12T21:36:14Z
2023-11-12T21:28:55.000Z
2023-11-12T21:28:55
--- license: creativeml-openrail-m task_categories: - text-classification language: - en tags: - art size_categories: - 1K<n<10K --- The animation was independently produced by Shinji Aramaki and his manga club during their time at Okayama University. The animation premiered at the URACON III sci-fi convention in 1984. More information can be found on MyAnimeList https://myanimelist.net/anime/42390/Uracon_III_Opening_Animation More caps can be found on our youtube https://capsekai.tumblr.com/
[ -0.5321662425994873, -0.13370613753795624, 0.5781287550926208, 0.07995578646659851, 0.15792202949523926, -0.04246586188673973, -0.1307913213968277, -0.3366507291793823, 0.857244074344635, 0.6407222747802734, -1.1462594270706177, -0.40319931507110596, -0.31455227732658386, 0.189939022064208...
null
null
null
null
null
null
null
null
null
null
null
null
null
ChristophSchuhmann/emotions
ChristophSchuhmann
2023-11-15T08:42:51Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-15T08:42:51Z
2023-11-12T21:34:43.000Z
2023-11-12T21:34:43
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public
open-llm-leaderboard
2023-11-12T21:46:18Z
0
0
null
[ "region:us" ]
2023-11-12T21:46:18Z
2023-11-12T21:45:18.000Z
2023-11-12T21:45:18
--- pretty_name: Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [PulsarAI/CollectiveCognition-v1.1-Nebula-7B](https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-12T21:42:17.063541](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public/blob/main/results_2023-11-12T21-42-17.063541.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5655902624582015,\n\ \ \"acc_stderr\": 0.033540567370804734,\n \"acc_norm\": 0.5747445580416879,\n\ \ \"acc_norm_stderr\": 0.03431067576831402,\n \"mc1\": 0.38555691554467564,\n\ \ \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5353024010333743,\n\ \ \"mc2_stderr\": 0.015743888224866397,\n \"em\": 0.35675335570469796,\n\ \ \"em_stderr\": 0.004905829488253491,\n \"f1\": 0.4216977768456382,\n\ \ \"f1_stderr\": 0.0047367493845716785\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5324232081911263,\n \"acc_stderr\": 0.014580637569995421,\n\ \ \"acc_norm\": 0.5810580204778157,\n \"acc_norm_stderr\": 0.014418106953639013\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6309500099581756,\n\ \ \"acc_stderr\": 0.004815613144385404,\n \"acc_norm\": 0.8239394542919737,\n\ \ \"acc_norm_stderr\": 0.0038009327705977565\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n\ \ \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n\ \ \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.03988903703336284,\n\ \ \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.03988903703336284\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.43,\n\ \ \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.43,\n \ \ \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6188679245283019,\n \"acc_stderr\": 0.029890609686286623,\n\ \ \"acc_norm\": 0.6188679245283019,\n \"acc_norm_stderr\": 0.029890609686286623\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 
0.6319444444444444,\n\ \ \"acc_stderr\": 0.040329990539607175,\n \"acc_norm\": 0.6319444444444444,\n\ \ \"acc_norm_stderr\": 0.040329990539607175\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n\ \ \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \ \ \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5433526011560693,\n\ \ \"acc_stderr\": 0.03798106566014498,\n \"acc_norm\": 0.5433526011560693,\n\ \ \"acc_norm_stderr\": 0.03798106566014498\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.04576665403207763,\n\ \ \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.04576665403207763\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.49361702127659574,\n \"acc_stderr\": 0.03268335899936337,\n\ \ \"acc_norm\": 0.49361702127659574,\n \"acc_norm_stderr\": 0.03268335899936337\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n\ \ \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n\ \ \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5448275862068965,\n \"acc_stderr\": 0.04149886942192117,\n\ \ \"acc_norm\": 0.5448275862068965,\n \"acc_norm_stderr\": 0.04149886942192117\n\ \ },\n 
\"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3915343915343915,\n \"acc_stderr\": 0.02513809138885108,\n \"\ acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.02513809138885108\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.38095238095238093,\n\ \ \"acc_stderr\": 0.04343525428949098,\n \"acc_norm\": 0.38095238095238093,\n\ \ \"acc_norm_stderr\": 0.04343525428949098\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \ \ \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n\ \ \"acc_stderr\": 0.027162537826948458,\n \"acc_norm\": 0.6483870967741936,\n\ \ \"acc_norm_stderr\": 0.027162537826948458\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n\ \ \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\"\ : 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n\ \ \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"\ acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.02749350424454806,\n\ \ \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 0.02749350424454806\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 
0.5615384615384615,\n \"acc_stderr\": 0.025158266016868592,\n\ \ \"acc_norm\": 0.5615384615384615,\n \"acc_norm_stderr\": 0.025158266016868592\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2740740740740741,\n \"acc_stderr\": 0.027195934804085626,\n \ \ \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085626\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.0322529423239964,\n \ \ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.0322529423239964\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"\ acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7614678899082569,\n \"acc_stderr\": 0.018272575810231867,\n \"\ acc_norm\": 0.7614678899082569,\n \"acc_norm_stderr\": 0.018272575810231867\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.39351851851851855,\n \"acc_stderr\": 0.03331747876370312,\n \"\ acc_norm\": 0.39351851851851855,\n \"acc_norm_stderr\": 0.03331747876370312\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7205882352941176,\n \"acc_stderr\": 0.03149328104507957,\n \"\ acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.03149328104507957\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293426,\n \ \ \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6681614349775785,\n\ \ \"acc_stderr\": 0.03160295143776679,\n \"acc_norm\": 0.6681614349775785,\n\ \ \"acc_norm_stderr\": 0.03160295143776679\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 
0.041649760719448786,\n\ \ \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n\ \ \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n\ \ \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6871165644171779,\n \"acc_stderr\": 0.036429145782924055,\n\ \ \"acc_norm\": 0.6871165644171779,\n \"acc_norm_stderr\": 0.036429145782924055\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.36607142857142855,\n\ \ \"acc_stderr\": 0.0457237235873743,\n \"acc_norm\": 0.36607142857142855,\n\ \ \"acc_norm_stderr\": 0.0457237235873743\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7378640776699029,\n \"acc_stderr\": 0.04354631077260597,\n\ \ \"acc_norm\": 0.7378640776699029,\n \"acc_norm_stderr\": 0.04354631077260597\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n\ \ \"acc_stderr\": 0.025598193686652265,\n \"acc_norm\": 0.811965811965812,\n\ \ \"acc_norm_stderr\": 0.025598193686652265\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \ \ \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7713920817369093,\n\ \ \"acc_stderr\": 0.015016884698539892,\n \"acc_norm\": 0.7713920817369093,\n\ \ \"acc_norm_stderr\": 0.015016884698539892\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6184971098265896,\n \"acc_stderr\": 0.0261521986197268,\n\ \ \"acc_norm\": 0.6184971098265896,\n \"acc_norm_stderr\": 0.0261521986197268\n\ \ },\n 
\"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22793296089385476,\n\ \ \"acc_stderr\": 0.014030149950805098,\n \"acc_norm\": 0.22793296089385476,\n\ \ \"acc_norm_stderr\": 0.014030149950805098\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.027475969910660952,\n\ \ \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.027475969910660952\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n\ \ \"acc_stderr\": 0.027368078243971646,\n \"acc_norm\": 0.6334405144694534,\n\ \ \"acc_norm_stderr\": 0.027368078243971646\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6820987654320988,\n \"acc_stderr\": 0.02591006352824088,\n\ \ \"acc_norm\": 0.6820987654320988,\n \"acc_norm_stderr\": 0.02591006352824088\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.42907801418439717,\n \"acc_stderr\": 0.02952591430255856,\n \ \ \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.02952591430255856\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4315514993481095,\n\ \ \"acc_stderr\": 0.012650007999463888,\n \"acc_norm\": 0.4315514993481095,\n\ \ \"acc_norm_stderr\": 0.012650007999463888\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5257352941176471,\n \"acc_stderr\": 0.030332578094555033,\n\ \ \"acc_norm\": 0.5257352941176471,\n \"acc_norm_stderr\": 0.030332578094555033\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6078431372549019,\n \"acc_stderr\": 0.019751726508762637,\n \ \ \"acc_norm\": 0.6078431372549019,\n \"acc_norm_stderr\": 0.019751726508762637\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n\ \ \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n\ \ \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5755102040816327,\n 
\"acc_stderr\": 0.031642094879429414,\n\ \ \"acc_norm\": 0.5755102040816327,\n \"acc_norm_stderr\": 0.031642094879429414\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7512437810945274,\n\ \ \"acc_stderr\": 0.030567675938916718,\n \"acc_norm\": 0.7512437810945274,\n\ \ \"acc_norm_stderr\": 0.030567675938916718\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036845,\n \ \ \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036845\n },\n\ \ \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n\ \ \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.5180722891566265,\n\ \ \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338734,\n\ \ \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338734\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.38555691554467564,\n\ \ \"mc1_stderr\": 0.01703883901059167,\n \"mc2\": 0.5353024010333743,\n\ \ \"mc2_stderr\": 0.015743888224866397\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7371744277821626,\n \"acc_stderr\": 0.012370922527262008\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.35675335570469796,\n \ \ \"em_stderr\": 0.004905829488253491,\n \"f1\": 0.4216977768456382,\n\ \ \"f1_stderr\": 0.0047367493845716785\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.09552691432903715,\n \"acc_stderr\": 0.008096605771155759\n\ \ }\n}\n```" repo_url: https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|arc:challenge|25_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-12T21-42-17.063541.parquet' - config_name: 
harness_drop_3 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|drop|3_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-12T21-42-17.063541.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|gsm8k|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hellaswag|10_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet' - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - 
'**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-12T21-42-17.063541.parquet' - config_name: 
harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 
2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-12T21-42-17.063541.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-12T21-42-17.063541.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet' 
- split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-12T21-42-17.063541.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 
2023_11_12T21_42_17.063541 path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-12T21-42-17.063541.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_12T21_42_17.063541 path: - '**/details_harness|winogrande|5_2023-11-12T21-42-17.063541.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-12T21-42-17.063541.parquet' - config_name: results data_files: - split: 2023_11_12T21_42_17.063541 path: - results_2023-11-12T21-42-17.063541.parquet - split: latest path: - results_2023-11-12T21-42-17.063541.parquet --- # Dataset Card for Evaluation run of PulsarAI/CollectiveCognition-v1.1-Nebula-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [PulsarAI/CollectiveCognition-v1.1-Nebula-7B](https://huggingface.co/PulsarAI/CollectiveCognition-v1.1-Nebula-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). 
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-12T21:42:17.063541](https://huggingface.co/datasets/open-llm-leaderboard/details_PulsarAI__CollectiveCognition-v1.1-Nebula-7B_public/blob/main/results_2023-11-12T21-42-17.063541.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5655902624582015, "acc_stderr": 0.033540567370804734, "acc_norm": 0.5747445580416879, "acc_norm_stderr": 0.03431067576831402, "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5353024010333743, "mc2_stderr": 0.015743888224866397, "em": 0.35675335570469796, "em_stderr": 0.004905829488253491, "f1": 0.4216977768456382, "f1_stderr": 0.0047367493845716785 }, "harness|arc:challenge|25": { "acc": 0.5324232081911263, "acc_stderr": 0.014580637569995421, "acc_norm": 0.5810580204778157, "acc_norm_stderr": 0.014418106953639013 }, "harness|hellaswag|10": { "acc": 0.6309500099581756, "acc_stderr": 0.004815613144385404, "acc_norm": 0.8239394542919737, "acc_norm_stderr": 0.0038009327705977565 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04292596718256981, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.03988903703336284, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.03988903703336284 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.43, 
"acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6188679245283019, "acc_stderr": 0.029890609686286623, "acc_norm": 0.6188679245283019, "acc_norm_stderr": 0.029890609686286623 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6319444444444444, "acc_stderr": 0.040329990539607175, "acc_norm": 0.6319444444444444, "acc_norm_stderr": 0.040329990539607175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5433526011560693, "acc_stderr": 0.03798106566014498, "acc_norm": 0.5433526011560693, "acc_norm_stderr": 0.03798106566014498 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207763, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207763 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.49361702127659574, "acc_stderr": 0.03268335899936337, "acc_norm": 0.49361702127659574, "acc_norm_stderr": 0.03268335899936337 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5448275862068965, "acc_stderr": 0.04149886942192117, "acc_norm": 0.5448275862068965, "acc_norm_stderr": 
0.04149886942192117 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.02513809138885108, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.02513809138885108 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.38095238095238093, "acc_stderr": 0.04343525428949098, "acc_norm": 0.38095238095238093, "acc_norm_stderr": 0.04343525428949098 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6483870967741936, "acc_stderr": 0.027162537826948458, "acc_norm": 0.6483870967741936, "acc_norm_stderr": 0.027162537826948458 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.02749350424454806, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.02749350424454806 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5615384615384615, "acc_stderr": 0.025158266016868592, "acc_norm": 0.5615384615384615, "acc_norm_stderr": 0.025158266016868592 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2740740740740741, "acc_stderr": 0.027195934804085626, "acc_norm": 
0.2740740740740741, "acc_norm_stderr": 0.027195934804085626 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5588235294117647, "acc_stderr": 0.0322529423239964, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.0322529423239964 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7614678899082569, "acc_stderr": 0.018272575810231867, "acc_norm": 0.7614678899082569, "acc_norm_stderr": 0.018272575810231867 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39351851851851855, "acc_stderr": 0.03331747876370312, "acc_norm": 0.39351851851851855, "acc_norm_stderr": 0.03331747876370312 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7205882352941176, "acc_stderr": 0.03149328104507957, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.03149328104507957 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.729957805907173, "acc_stderr": 0.028900721906293426, "acc_norm": 0.729957805907173, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6681614349775785, "acc_stderr": 0.03160295143776679, "acc_norm": 0.6681614349775785, "acc_norm_stderr": 0.03160295143776679 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6564885496183206, "acc_stderr": 0.041649760719448786, "acc_norm": 0.6564885496183206, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070417, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6871165644171779, 
"acc_stderr": 0.036429145782924055, "acc_norm": 0.6871165644171779, "acc_norm_stderr": 0.036429145782924055 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.36607142857142855, "acc_stderr": 0.0457237235873743, "acc_norm": 0.36607142857142855, "acc_norm_stderr": 0.0457237235873743 }, "harness|hendrycksTest-management|5": { "acc": 0.7378640776699029, "acc_stderr": 0.04354631077260597, "acc_norm": 0.7378640776699029, "acc_norm_stderr": 0.04354631077260597 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.025598193686652265, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652265 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7713920817369093, "acc_stderr": 0.015016884698539892, "acc_norm": 0.7713920817369093, "acc_norm_stderr": 0.015016884698539892 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6184971098265896, "acc_stderr": 0.0261521986197268, "acc_norm": 0.6184971098265896, "acc_norm_stderr": 0.0261521986197268 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.22793296089385476, "acc_stderr": 0.014030149950805098, "acc_norm": 0.22793296089385476, "acc_norm_stderr": 0.014030149950805098 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6405228758169934, "acc_stderr": 0.027475969910660952, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.027475969910660952 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971646, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6820987654320988, "acc_stderr": 0.02591006352824088, "acc_norm": 0.6820987654320988, "acc_norm_stderr": 0.02591006352824088 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.02952591430255856, 
"acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.02952591430255856 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4315514993481095, "acc_stderr": 0.012650007999463888, "acc_norm": 0.4315514993481095, "acc_norm_stderr": 0.012650007999463888 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5257352941176471, "acc_stderr": 0.030332578094555033, "acc_norm": 0.5257352941176471, "acc_norm_stderr": 0.030332578094555033 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6078431372549019, "acc_stderr": 0.019751726508762637, "acc_norm": 0.6078431372549019, "acc_norm_stderr": 0.019751726508762637 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5755102040816327, "acc_stderr": 0.031642094879429414, "acc_norm": 0.5755102040816327, "acc_norm_stderr": 0.031642094879429414 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7512437810945274, "acc_stderr": 0.030567675938916718, "acc_norm": 0.7512437810945274, "acc_norm_stderr": 0.030567675938916718 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.8, "acc_stderr": 0.04020151261036845, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036845 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.038899512528272166, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338734, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.38555691554467564, "mc1_stderr": 0.01703883901059167, "mc2": 0.5353024010333743, "mc2_stderr": 0.015743888224866397 }, "harness|winogrande|5": { "acc": 0.7371744277821626, "acc_stderr": 0.012370922527262008 }, "harness|drop|3": { "em": 0.35675335570469796, "em_stderr": 
0.004905829488253491, "f1": 0.4216977768456382, "f1_stderr": 0.0047367493845716785 }, "harness|gsm8k|5": { "acc": 0.09552691432903715, "acc_stderr": 0.008096605771155759 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7202279567718506, -0.8087406158447266, 0.2850535213947296, 0.2077576220035553, -0.18288084864616394, -0.010340481996536255, 0.04068601131439209, -0.23714177310466766, 0.6177421808242798, -0.04812793433666229, -0.4969571530818939, -0.6732044219970703, -0.4449620544910431, 0.2705213129520...
null
null
null
null
null
null
null
null
null
null
null
null
null
Capsekai/ShironoKiseki
Capsekai
2023-11-12T21:56:28Z
0
0
null
[ "task_categories:text-classification", "size_categories:n<1K", "language:en", "license:creativeml-openrail-m", "anime", "art", "region:us" ]
2023-11-12T21:56:28Z
2023-11-12T21:51:23.000Z
2023-11-12T21:51:23
--- license: creativeml-openrail-m task_categories: - text-classification language: - en tags: - anime - art pretty_name: Shiro No Kiseki size_categories: - n<1K --- Shiro No Kiseki is a lesser known anime cenetered around the subject of the Shinsengumi. Capped by https://capsekai.tumblr.com/
[ -0.632133424282074, -0.5688022971153259, 0.8150709271430969, 0.3205197751522064, 0.007258637808263302, 0.2715767025947571, -0.14172999560832977, -0.32084816694259644, 1.2684016227722168, 0.4352180063724518, -1.0648759603500366, -0.22782811522483826, -0.5367999076843262, -0.0138532873243093...
null
null
null
null
null
null
null
null
null
null
null
null
null
danielz01/DIOR-RSVG
danielz01
2023-11-12T22:08:29Z
0
0
null
[ "region:us" ]
2023-11-12T22:08:29Z
2023-11-12T21:57:37.000Z
2023-11-12T21:57:37
--- dataset_info: features: - name: image dtype: image - name: path dtype: string - name: objects struct: - name: bbox sequence: sequence: int32 - name: captions sequence: string - name: categories sequence: string - name: categories_normalized sequence: string splits: - name: train num_bytes: 4808350968.376 num_examples: 14748 - name: val num_bytes: 1170343155.264 num_examples: 3457 - name: test num_bytes: 2083503162.625 num_examples: 6125 download_size: 7863851320 dataset_size: 8062197286.265 configs: - config_name: default data_files: - split: train path: data/train-* - split: val path: data/val-* - split: test path: data/test-* --- # Dataset Card for "DIOR-RSVG" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6429999470710754, -0.15914000570774078, -0.012366417795419693, 0.19680152833461761, -0.3297038972377777, 0.06786800920963287, 0.3788057267665863, 0.06485798954963684, 0.7052408456802368, 0.1902627944946289, -0.8948976993560791, -0.8613997101783752, -0.8035734295845032, -0.07587087899446...
null
null
null
null
null
null
null
null
null
null
null
null
null
zeio/branch
zeio
2023-11-17T00:59:08Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-17T00:59:08Z
2023-11-12T22:18:53.000Z
2023-11-12T22:18:53
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
DJBanzin/Vozesvukvuk
DJBanzin
2023-11-12T22:43:00Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T22:43:00Z
2023-11-12T22:39:45.000Z
2023-11-12T22:39:45
--- license: openrail ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
NomeIncrivel/Pernalonga
NomeIncrivel
2023-11-12T22:44:10Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-12T22:44:10Z
2023-11-12T22:43:49.000Z
2023-11-12T22:43:49
--- license: openrail ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
Ahmed107/sample
Ahmed107
2023-11-12T23:11:40Z
0
0
null
[ "region:us" ]
2023-11-12T23:11:40Z
2023-11-12T23:11:40.000Z
2023-11-12T23:11:40
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
Tonic/EasyReddit
Tonic
2023-11-13T12:52:23Z
0
2
null
[ "size_categories:10M<n<100M", "language:en", "license:mit", "not-for-all-audiences", "chemistry", "biology", "finance", "legal", "music", "art", "code", "climate", "medical", "region:us" ]
2023-11-13T12:52:23Z
2023-11-12T23:18:16.000Z
2023-11-12T23:18:16
--- license: mit language: - en tags: - not-for-all-audiences - chemistry - biology - finance - legal - music - art - code - climate - medical pretty_name: Easy Reddit size_categories: - 10M<n<100M configs: - config_name: shards data_files: - split: train path: - shard_1.jsonl - shard_2.jsonl - shard_3.jsonl - shard_4.jsonl - shard_5.jsonl - shard_6.jsonl - shard_7.jsonl - shard_8.jsonl - shard_9.jsonl - shard_10.jsonl - shard_11.jsonl - shard_12.jsonl - shard_13.jsonl - shard_14.jsonl - shard_15.jsonl - shard_16.jsonl - shard_17.jsonl - shard_18.jsonl - shard_19.jsonl - shard_20.jsonl - shard_21.jsonl - shard_22.jsonl - shard_23.jsonl - shard_24.jsonl - shard_25.jsonl - shard_26.jsonl - shard_27.jsonl - shard_28.jsonl - shard_29.jsonl - shard_30.jsonl - shard_31.jsonl - shard_32.jsonl - shard_33.jsonl - shard_34.jsonl --- # 🙋🏻‍♂️Welcome to 🧑🏻‍🚀Tonic's🚀🚰Easy🔴Reddit🔥! ![image/png](https://cdn-uploads.huggingface.co/production/uploads/62a3bb1cd0d8c2c2169f0b88/tsm1OFhNgT4wzIw-_MGQ2.png) This is every "best reddit_question_best_answers" appended and produced according to the following template : ```json {"prompt": "This is the first prompt", "completion": "This is the first completion"} {"prompt": "This is the second prompt", "completion": "This is the second completion"} ``` ![image/png](https://cdn-uploads.huggingface.co/production/uploads/62a3bb1cd0d8c2c2169f0b88/N_RqZSJ32MDIrRGbLcPqm.png) - 🌟 You can use it in shards or all together ! - 🌟 This dataset is **internally consistent** ! 
🤔The point is to make it easy to train models with a single correctly formatted dataset of - **54,367,153 rows** # Original Dataset : [nreimers/reddit_question_best_answers](https://huggingface.co/datasets/nreimers/reddit_question_best_answers) # How To Use : Combine random shards in random quantities to produce a very high quality conversational training dataset for fine tuning or try combining rows line by line to save memory by running the following code: ```python # see selectbyline.py import os import random # Directory containing the shard JSONL files shard_directory = "/path/to/shard/directory" # Get a list of all JSONL files in the directory shard_files = [f for f in os.listdir(shard_directory) if f.endswith('.jsonl')] # Function to read a random number of lines (between min_lines and max_lines) from a file def read_random_lines(filename, min_lines, max_lines): selected_lines = [] num_lines = random.randint(min_lines, max_lines) with open(filename, 'r') as file: lines = list(file) if len(lines) <= num_lines: return lines selected_lines = random.sample(lines, num_lines) return selected_lines # Function to combine shards def combine_shards(output_filename, num_combinations): with open(output_filename, 'w') as output_file: for _ in range(num_combinations): selected_shard_file = random.choice(shard_files) lines = read_random_lines(os.path.join(shard_directory, selected_shard_file), 5000, 10000) output_file.writelines(lines) # Example usage combine_shards("/path/to/output/combined_shards.jsonl", 10) ``` # Pre-Processing ```python import json import os import gzip import logging import re import random # Setup basic logging logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") def clean_string(s): """Remove special characters, keeping only alphanumeric characters and spaces.""" if isinstance(s, list): # Extract text from each dictionary in the list and join into a single string s = " ".join([d.get("body", "") if isinstance(d, 
dict) else str(d) for d in s]) return re.sub(r'[^A-Za-z0-9 ]+', '', s) def process_file(input_file, output_file): try: dataset = [] with gzip.open(input_file, 'rt') as infile: for line in infile: # Parse the JSON line try: data = json.loads(line) except json.JSONDecodeError: logging.error(f"Invalid JSON format in {input_file}: {line}") continue # Extract and clean the 'body' and 'answers' fields prompt = clean_string(data.get("body", "")) completion = clean_string(data.get("answers", "")) # For each body found, make a new row and duplicate the prompt for it if isinstance(data.get("body", ""), list): for body in data.get("body", []): cleaned_body = clean_string(body) dataset.append({"prompt": cleaned_body, "completion": completion}) else: dataset.append({"prompt": prompt, "completion": completion}) # Shuffle the dataset random.shuffle(dataset) # Write the shuffled dataset to the output file with open(output_file, 'a') as outfile: for item in dataset: json.dump(item, outfile) outfile.write('\n') logging.info(f"Processed file: {input_file}") except Exception as e: logging.error(f"Error processing file {input_file}: {e}") def process_files(file_list, output_dir): # Ensure the output directory exists if not os.path.exists(output_dir): os.makedirs(output_dir) # Create a single output file path output_file = os.path.join(output_dir, 'synthesized_dataset.jsonl') for input_file in file_list: process_file(input_file, output_file) # Update with your list of .gz file paths file_list = [r'C:\Users\MeMyself\FILES, r"C:\Users\MeMyself\FILES" ] # Update with your list of .gz file paths output_dir = r'C:\Users\MeMyself\reddit_question_best_answers\processed' process_files(file_list, output_dir) ``` #### **sharding script** : ```python import json import os def read_dataset(file_path): try: with open(file_path, 'r') as file: data = [json.loads(line) for line in file] print(f"Dataset loaded successfully from {file_path}.") return data except Exception as e: print(f"Error reading 
dataset from {file_path}: {e}") return [] def shard_dataset(dataset, num_shards): shard_size = len(dataset) // num_shards shards = [dataset[i:i + shard_size] for i in range(0, len(dataset), shard_size)] if len(shards) > num_shards: shards[num_shards - 1].extend(shards.pop()) print(f"Dataset sharded into {num_shards} parts.") return shards def write_shards(shards, output_dir): if not os.path.exists(output_dir): os.makedirs(output_dir) print(f"Created output directory at {output_dir}.") for i, shard in enumerate(shards): shard_file = os.path.join(output_dir, f'shard_{i+1}.jsonl') with open(shard_file, 'w') as file: for item in shard: json.dump(item, file) file.write('\n') print(f"Shard {i+1} written to {shard_file}.") def main(): input_file = 'path_to_processed_dataset.jsonl' # Update with your processed dataset file path output_dir = 'sharded_dataset' # Update with your output directory for shards num_shards = 33 dataset = read_dataset(input_file) if dataset: shards = shard_dataset(dataset, num_shards) write_shards(shards, output_dir) print("All shards have been successfully written.") else: print("No dataset to process.") if __name__ == "__main__": main() ``` ### Disclaimer : 🌟Re-format this dataset before use. 🌟Probably there's a **big problem with the token count** on these long answers 😉 🌟**Good Luck !** 🧑🏻‍🚀🚀
[ -0.3134106695652008, -0.8404726982116699, 0.41758301854133606, 0.03120826557278633, -0.30854305624961853, 0.0670580193400383, -0.2533075511455536, -0.125591441988945, 0.4451187551021576, 0.6773446202278137, -0.3145877718925476, -0.44157782196998596, -0.4347592890262604, 0.276363730430603, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
maywell/ko-gpt3_14k
maywell
2023-11-12T23:40:02Z
0
1
null
[ "region:us" ]
2023-11-12T23:40:02Z
2023-11-12T23:25:23.000Z
2023-11-12T23:25:23
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
BEE-spoke-data/govdocs1-txt-raw
BEE-spoke-data
2023-11-19T20:03:52Z
0
0
null
[ "task_categories:text-generation", "size_categories:10K<n<100K", "license:odc-by", "region:us" ]
2023-11-19T20:03:52Z
2023-11-12T23:28:17.000Z
2023-11-12T23:28:17
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* dataset_info: features: - name: section dtype: string - name: filename dtype: string - name: text dtype: string splits: - name: train num_bytes: 16101385278.039026 num_examples: 67984 - name: validation num_bytes: 894547719.9804866 num_examples: 3777 - name: test num_bytes: 894547719.9804866 num_examples: 3777 download_size: 7656656755 dataset_size: 17890480718 license: odc-by task_categories: - text-generation size_categories: - 10K<n<100K --- # Dataset Card for "govdocs1-txt-raw" Somewhere to put the raw txt files before filtering them Source info/page: https://digitalcorpora.org/corpora/file-corpora/files/ ``` @inproceedings{garfinkel2009bringing, title={Bringing Science to Digital Forensics with Standardized Forensic Corpora}, author={Garfinkel, Simson and Farrell, Paul and Roussev, Vassil and Dinolt, George}, booktitle={Digital Forensic Research Workshop (DFRWS) 2009}, year={2009}, address={Montreal, Canada}, url={https://digitalcorpora.org/corpora/file-corpora/files/} } ```
[ -0.46472835540771484, -0.3683289885520935, 0.7096522450447083, -0.36276185512542725, -0.73039710521698, 0.28545284271240234, -0.11977482587099075, -0.21620768308639526, 0.24451203644275665, 0.8278258442878723, -0.6420505046844482, -0.8863324522972107, -0.5411277413368225, 0.349764972925186...
null
null
null
null
null
null
null
null
null
null
null
null
null
Rodrigo-1/Protocolo_Hair
Rodrigo-1
2023-11-12T23:47:06Z
0
0
null
[ "license:unknown", "region:us" ]
2023-11-12T23:47:06Z
2023-11-12T23:46:51.000Z
2023-11-12T23:46:51
--- license: unknown ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
chunlohk/Archtype
chunlohk
2023-11-13T05:08:56Z
0
0
null
[ "region:us" ]
2023-11-13T05:08:56Z
2023-11-12T23:49:01.000Z
2023-11-12T23:49:01
[ { "instruction": "What emotional appeal do Innocent archetype brands typically use in their advertising?", "input": "", "output": "Innocent archetype brands typically use emotional appeals that focus on happiness, comfort, and a return to simpler times. Their advertising often evokes feelings of warmth, safety, and nostalgia, aiming to create a connection through positive, wholesome imagery." }, { "instruction": "How do Sage archetype brands position themselves in terms of expertise and knowledge?", "input": "", "output": "Sage archetype brands position themselves as sources of wisdom and expertise. They often highlight their depth of knowledge, commitment to truth, and intellectual prowess, portraying themselves as trusted authorities in their field." }, { "instruction": "What is the key messaging strategy of Explorer archetype brands regarding adventure and discovery?", "input": "", "output": "The key messaging strategy of Explorer archetype brands is to emphasize the thrill of adventure and the value of discovery. They inspire their audience to embark on journeys, both literal and metaphorical, that promise excitement, freedom, and self-discovery." }, { "instruction": "In what ways do Innocent archetype brands promote simplicity and clarity?", "input": "", "output": "Innocent archetype brands promote simplicity and clarity by using straightforward messaging, clean and uncomplicated design, and focusing on the essential qualities of their products or services. They aim to provide clear, simple solutions that resonate with a desire for purity and ease." }, { "instruction": "How do Sage archetype brands use educational content to engage their audience?", "input": "", "output": "Sage archetype brands use educational content such as tutorials, informative articles, expert opinions, and insightful research to engage their audience. 
They focus on providing valuable knowledge and understanding, thus positioning themselves as go-to sources for information and education." }, { "instruction": "What kind of imagery do Explorer archetype brands often use to appeal to their target audience?", "input": "", "output": "Explorer archetype brands often use imagery that conveys vast, open landscapes, uncharted territories, and scenes of adventure and exploration. This imagery is designed to invoke feelings of wanderlust, curiosity, and the allure of the unknown." }, { "instruction": "How do Innocent archetype brands convey a message of trust and reliability?", "input": "", "output": "Innocent archetype brands convey trust and reliability by maintaining a consistent, honest, and straightforward approach in their messaging. They often emphasize traditional values, quality, and a commitment to delivering on their promises without complications or hidden agendas." }, { "instruction": "In what ways do Sage archetype brands demonstrate their commitment to truth and accuracy?", "input": "", "output": "Sage archetype brands demonstrate their commitment to truth and accuracy by backing their claims with research, data, and expert validation. They prioritize factual information, logical reasoning, and a rational approach to problems and solutions." }, { "instruction": "What strategies do Explorer archetype brands use to create a sense of individualism and freedom in their products?", "input": "", "output": "Explorer archetype brands create a sense of individualism and freedom by offering customizable options, encouraging personal expression, and promoting the idea that their products are tools for self-exploration and unique experiences." }, { "instruction": "How do Innocent archetype brands create a sense of comfort and security in their marketing?", "input": "", "output": "Innocent archetype brands create a sense of comfort and security by using warm, reassuring tones in their marketing. 
They focus on creating a friendly, approachable image and often highlight the safety, reliability, and simplicity of their products." }, { "instruction": "What role does nostalgia play in the marketing of Innocent archetype brands?", "input": "", "output": "Nostalgia plays a significant role in the marketing of Innocent archetype brands, as they often evoke memories of a simpler, more idyllic past. This approach aims to create an emotional connection with consumers who yearn for a sense of simplicity and purity in their lives." }, { "instruction": "How do Sage archetype brands position themselves as thought leaders in their industry?", "input": "", "output": "Sage archetype brands position themselves as thought leaders by consistently providing innovative ideas, insightful analysis, and authoritative content. They lead conversations in their industry and are often at the forefront of research and development." }, { "instruction": "In what ways do Explorer archetype brands encourage a sense of adventure in their consumers?", "input": "", "output": "Explorer archetype brands encourage a sense of adventure by showcasing the thrill and excitement of new experiences. They inspire consumers to step out of their comfort zones, explore new horizons, and embrace the journey of self-discovery." }, { "instruction": "How do Innocent archetype brands use simplicity in their product design and functionality?", "input": "", "output": "Innocent archetype brands use simplicity in product design and functionality by focusing on user-friendliness, minimalism, and ease of use. Their products often feature straightforward designs that are intuitive and uncomplicated, appealing to consumers' desire for clarity and efficiency." 
}, { "instruction": "What methods do Sage archetype brands use to validate their expertise and knowledge?", "input": "", "output": "Sage archetype brands validate their expertise and knowledge by citing scientific studies, showcasing certifications, and highlighting their history of expertise. They often use expert endorsements and factual evidence to reinforce their authority and credibility." }, { "instruction": "What messaging themes do Explorer archetype brands typically use to resonate with their target audience?", "input": "", "output": "Explorer archetype brands typically use messaging themes centered around freedom, exploration, and personal growth. They emphasize the idea of breaking free from the ordinary, embarking on new journeys, and discovering one's true self through adventure." }, { "instruction": "How do Innocent archetype brands communicate their values of purity and wholesomeness?", "input": "", "output": "Innocent archetype brands communicate their values of purity and wholesomeness by using clean, natural imagery, focusing on simplicity in their messaging, and often highlighting organic or natural ingredients in their products." }, { "instruction": "In what ways do Sage archetype brands use their knowledge to solve consumer problems?", "input": "", "output": "Sage archetype brands use their knowledge to solve consumer problems by offering expert advice, providing detailed information, and presenting well-researched solutions. They often position themselves as reliable sources of information that consumers can turn to for solving specific issues." 
}, { "instruction": "What type of product features do Explorer archetype brands highlight to appeal to their audience's sense of discovery?", "input": "", "output": "Explorer archetype brands highlight product features that enhance the experience of discovery, such as durability in various environments, adaptability to different situations, and innovative design that encourages exploration and adventure." }, { "instruction": "How do Innocent archetype brands create an emotional connection with their audience through their branding?", "input": "", "output": "Innocent archetype brands create an emotional connection with their audience by evoking feelings of warmth, safety, and nostalgia. They often use storytelling, familiar imagery, and a comforting tone in their branding to resonate emotionally with consumers." }, { "instruction": "What is the core desire of the Explorer archetype in branding?", "input": "", "output": "The core desire of the Explorer archetype in branding is the freedom to find out who you are through exploring the world." }, { "instruction": "What is the greatest fear of the Explorer archetype according to Chapter 5?", "input": "", "output": "The greatest fear of the Explorer archetype is getting trapped, conforming, inner emptiness, and nonbeing." }, { "instruction": "How does Starbucks embody the Explorer archetype?", "input": "", "output": "Starbucks embodies the Explorer archetype by offering an experience of global discovery and personal freedom, through its diverse range of coffees from different parts of the world, and its store designs and branding that evoke a sense of adventure." }, { "instruction": "What does the Explorer archetype's journey represent in branding?", "input": "", "output": "In branding, the Explorer archetype's journey represents a search for authenticity and fulfillment, often involving a physical or metaphorical journey to explore new horizons and experiences." 
}, { "instruction": "What type of products and services resonate with the Explorer archetype?", "input": "", "output": "Products and services that resonate with the Explorer archetype are those that serve as props on journeys of discovery, such as off-road vehicles, travel services, outdoor gear, and brands that emphasize individuality and freedom." }, { "instruction": "Why is 'The Wizard of Oz' an example of the Explorer archetype?", "input": "", "output": "'The Wizard of Oz' is an example of the Explorer archetype because it features a journey of self-discovery and exploration in a new, unknown world, embodying the quest for something greater and the exploration of new horizons." }, { "instruction": "How does the Explorer archetype view sports and physical activities?", "input": "", "output": "The Explorer archetype favors individualistic or solitary sports and physical activities, such as kayaking, skiing, biking, or long-distance running, which offer experiences in nature and opportunities for self-discovery and challenge." }, { "instruction": "What is a common marketing strategy for Explorer archetype brands?", "input": "", "output": "A common marketing strategy for Explorer archetype brands is to appeal to the sense of adventure and discovery, emphasizing freedom, authenticity, and the journey rather than the destination." }, { "instruction": "What is the strategy employed by the Explorer archetype?", "input": "", "output": "The strategy of the Explorer archetype involves journeying, seeking out and experiencing new things, and escaping from entrapment and boredom." }, { "instruction": "What are some of the narrative genres associated with the Explorer archetype?", "input": "", "output": "Narrative genres associated with the Explorer archetype include travelogues, immigrant narratives, science fiction, coming-of-age stories, expatriate literature, and absurdist literature demonstrating human alienation." 
}, { "instruction": "Can you name a few famous Explorer-brand television series?", "input": "", "output": "Famous Explorer-brand television series include 'The Lone Ranger' and 'Star Trek', which embody the essence of exploration and adventure." }, { "instruction": "What is the typical natural setting for an Explorer's advertisement?", "input": "", "output": "The typical natural setting for an Explorer's advertisement includes big open skies, open roads, nature in its various forms, especially mountains, and the night sky symbolizing infinite possibilities." }, { "instruction": "How does the Explorer archetype influence clothing brands?", "input": "", "output": "Clothing brands influenced by the Explorer archetype offer sturdy clothes that provide freedom of movement, appealing to those who seek adventure and individuality. Brands like Levi’s, REI, Patagonia, and Land’s End are examples." }, { "instruction": "What type of politics is prevalent in Silicon Valley and how does it relate to the Explorer archetype?", "input": "", "output": "Libertarian politics are prevalent in Silicon Valley, resonating with the Explorer archetype's values of independence, laissez-faire attitudes, and empowerment of the individual." }, { "instruction": "How does the Explorer archetype manifest in the consumer market according to 'The Soul of the New Consumer'?", "input": "", "output": "According to 'The Soul of the New Consumer', the Explorer archetype manifests in the consumer market as individuals who are better informed, involved, skeptical of advertising, value authenticity, and are restless and time-conscious." }, { "instruction": "What is the trap of the Explorer archetype?", "input": "", "output": "The trap of the Explorer archetype is aimless wandering and becoming a misfit, losing direction in the pursuit of new experiences and individuality." 
}, { "instruction": "What are the levels of the Explorer archetype?", "input": "", "output": "The levels of the Explorer archetype include: Level One - hitting the open road and exploring the world, Level Two - seeking individuality and personal fulfillment, and Level Three - expressing individuality and uniqueness." }, { "instruction": "Describe the shadow aspect of the Explorer archetype.", "input": "", "output": "The shadow aspect of the Explorer archetype is characterized by being so alienated that one cannot find any way to fit in, leading to extreme detachment and isolation." }, { "instruction": "What is the appeal of Explorer archetype in sports?", "input": "", "output": "The appeal of the Explorer archetype in sports lies in individualistic or solitary pursuits like kayaking, skiing, or biking, which provide experiences in nature and opportunities for self-discovery." }, { "instruction": "How do Explorer brands like Starbucks and Amazon.com cater to their customers' individualism?", "input": "", "output": "Explorer brands like Starbucks and Amazon.com cater to individualism by offering unique and personalized experiences, such as customized coffee orders or book recommendations based on previous purchases." }, { "instruction": "What makes the United States a quintessential Explorer-brand country?", "input": "", "output": "The United States is a quintessential Explorer-brand country due to its immigrant heritage, emphasis on political rights, and historical narrative of exploration and seeking new horizons." }, { "instruction": "What challenge do Explorer archetype brands face in Asian and Latin American cultures?", "input": "", "output": "Explorer archetype brands face the challenge of adapting to Asian and Latin American cultures that value group relations and community more than individual exploration and independence." 
}, { "instruction": "How does the Explorer archetype manifest in the fashion choices of adolescents?", "input": "", "output": "The Explorer archetype manifests in the fashion choices of adolescents through styles that confront conventional norms, expressing their individuality and alignment with outsider or counter-culture trends." }, { "instruction": "What role does the Explorer archetype play in midlife transitions?", "input": "", "output": "In midlife transitions, the Explorer archetype often becomes reactivated, leading individuals to seek new experiences, change careers, or embark on journeys of self-discovery and personal fulfillment." }, { "instruction": "How does The Body Shop brand embody the Explorer archetype?", "input": "", "output": "The Body Shop embodies the Explorer archetype by offering products with exotic ingredients, emphasizing global awareness, environmental consciousness, and a spirit of individualistic activism." }, { "instruction": "What kind of work environment does the Explorer archetype promote?", "input": "", "output": "The Explorer archetype promotes a work environment that values individuality, minimizes hierarchy, and encourages innovation and pioneering spirit, often found in startups and creative industries." }, { "instruction": "How do virtual organizations align with the Explorer archetype?", "input": "", "output": "Virtual organizations align with the Explorer archetype by offering flexibility, decentralization, and autonomy, enabling individuals to work independently and pursue innovative approaches." }, { "instruction": "What type of housing might appeal to someone with an Explorer archetype?", "input": "", "output": "Housing that appeals to someone with an Explorer archetype typically includes open, natural spaces with expansive views, like those offered by Timberpeg homes, emphasizing a connection with nature and freedom." 
}, { "instruction": "Why are brands like Starbucks and Amazon.com considered Explorer brands?", "input": "", "output": "Brands like Starbucks and Amazon.com are considered Explorer brands because they offer unique and customizable experiences that resonate with the desire for individuality, exploration, and personal freedom." }, { "instruction": "What is the relationship between the Explorer archetype and technology products?", "input": "", "output": "The relationship between the Explorer archetype and technology products lies in the emphasis on personal empowerment, innovation, and the ability to access a world of information and experiences, aligning with the Explorer's quest for knowledge and freedom." }, { "instruction": "How does the Explorer archetype influence customer loyalty?", "input": "", "output": "The Explorer archetype influences customer loyalty by appealing to the values of authenticity, individuality, and adventure. Brands that successfully embody these values can foster strong loyalty among Explorer-oriented consumers." }, { "instruction": "What is the significance of the name 'Starbucks' in relation to the Explorer archetype?", "input": "", "output": "The name 'Starbucks', derived from the novel 'Moby Dick', evokes a sense of adventure and exploration, aligning with the Explorer archetype by suggesting a journey into the unknown and the pursuit of new experiences." }, { "instruction": "How does the Explorer archetype manifest in the branding of off-road vehicles?", "input": "", "output": "Off-road vehicle brands like Jeep Wrangler embody the Explorer archetype by emphasizing adventure, freedom, and the ability to navigate uncharted territories, appealing to consumers' desire for exploration and discovery." 
}, { "instruction": "What is the core desire of the Explorer archetype in branding?", "input": "", "output": "The core desire of the Explorer archetype in branding is the freedom to find out who you are through exploring the world, focusing on experiences that promote self-discovery and authenticity." }, { "instruction": "How does Starbucks use the Explorer archetype in its branding?", "input": "", "output": "Starbucks utilizes the Explorer archetype by presenting coffee as an exotic, adventurous experience, drawing from diverse global origins, and creating an atmosphere that encourages exploration and individuality." }, { "instruction": "In what way does Amazon.com reflect the Explorer archetype?", "input": "", "output": "Amazon.com reflects the Explorer archetype by offering a vast and diverse range of products, enabling customers to explore and discover new items and ideas from the comfort of their own home." }, { "instruction": "What role does nature play in the Explorer archetype for branding?", "input": "", "output": "Nature plays a significant role in the Explorer archetype for branding, as it symbolizes uncharted territory and the joy of discovery. Brands that associate with nature often appeal to the Explorer's desire for adventure and exploration in the natural world." }, { "instruction": "How do individual sports relate to the Explorer archetype?", "input": "", "output": "Individual sports like kayaking, skiing, and biking resonate with the Explorer archetype by emphasizing personal challenges, exploration of nature, and the journey of self-discovery through solitary or unique physical experiences." }, { "instruction": "What is the relationship between the Explorer archetype and immigrant narratives?", "input": "", "output": "Immigrant narratives often embody the Explorer archetype by depicting journeys into new lands and cultures, symbolizing the quest for a better life, personal growth, and the exploration of unknown territories." 
}, { "instruction": "How does the branding of boats appeal to the Explorer archetype?", "input": "", "output": "Boat branding appeals to the Explorer archetype by emphasizing the freedom and adventure of the open seas, inviting consumers to embark on journeys of discovery and exploration in aquatic environments." }, { "instruction": "In what way does the Explorer archetype influence fashion branding?", "input": "", "output": "Fashion brands influenced by the Explorer archetype often feature durable, functional, and versatile clothing, appealing to consumers' desire for apparel that supports adventure and individual expression." }, { "instruction": "How does The Great Gatsby reflect the Explorer archetype?", "input": "", "output": "The Great Gatsby reflects the Explorer archetype through its themes of seeking a new identity, the pursuit of the American Dream, and the exploration of the social landscape of the Jazz Age, representing a journey of self-discovery and aspiration." }, { "instruction": "What aspect of the Explorer archetype is captured in the success of travelogues?", "input": "", "output": "The success of travelogues captures the Explorer archetype's thirst for discovery, adventure, and the experience of diverse cultures and landscapes, appealing to the inherent desire to explore the unknown." }, { "instruction": "How do expatriate literature and the Explorer archetype relate?", "input": "", "output": "Expatriate literature relates to the Explorer archetype by depicting the challenges and rewards of living in a foreign culture, reflecting the journey of self-discovery and the quest for a sense of belonging in a new world." }, { "instruction": "What is the significance of the 'open road' in Explorer archetype branding?", "input": "", "output": "The 'open road' in Explorer archetype branding symbolizes freedom, endless possibilities, and the journey towards self-discovery. It appeals to consumers' longing for adventure and escape from the mundane." 
}, { "instruction": "How does the slogan 'No Boundaries' reflect the Explorer archetype for the Ford Explorer?", "input": "", "output": "The slogan 'No Boundaries' for the Ford Explorer reflects the Explorer archetype by emphasizing limitless exploration, the freedom to traverse diverse terrains, and the vehicle's capability to support adventurous lifestyles." }, { "instruction": "What is the connection between the Explorer archetype and coming-of-age stories?", "input": "", "output": "Coming-of-age stories connect with the Explorer archetype through their focus on personal growth, the journey from innocence to experience, and the exploration of identity and the wider world." }, { "instruction": "How does the archetype of the Explorer relate to absurdist literature?", "input": "", "output": "Absurdist literature relates to the Explorer archetype by highlighting the quest for meaning in a seemingly chaotic world, reflecting the Explorer's journey towards understanding and navigating human alienation." }, { "instruction": "What role does 'The Odyssey' by Homer play in exemplifying the Explorer archetype?", "input": "", "output": "Homer's 'The Odyssey' is a classic example of the Explorer archetype, depicting a hero's epic journey full of challenges and discoveries, symbolizing the quest for adventure, knowledge, and homecoming." }, { "instruction": "How is the Explorer archetype represented in science fiction?", "input": "", "output": "Science fiction often represents the Explorer archetype through stories of space exploration and discovery of new worlds, embodying the human quest for knowledge and the unknown." 
}, { "instruction": "What is the relationship between the Explorer archetype and literature about seeking the promised land?", "input": "", "output": "Literature about seeking the promised land resonates with the Explorer archetype by embodying the search for a utopian place of fulfillment and happiness, a journey that mirrors the Explorer's quest for a better world." }, { "instruction": "How does the Explorer archetype manifest in the branding of Yamaha motorcycles?", "input": "", "output": "Yamaha motorcycles embody the Explorer archetype by promoting independence, adventure, and the thrill of the open road, appealing to consumers' desire for freedom and exploration." }, { "instruction": "What is the significance of the slogan 'Take your body where your mind has already wandered' for Jeep Wrangler?", "input": "", "output": "Jeep Wrangler's slogan 'Take your body where your mind has already wandered' captures the essence of the Explorer archetype, encouraging adventure, curiosity, and the pursuit of one's desires through exploration." }, { "instruction": "How does the Explorer archetype influence the branding of sports equipment?", "input": "", "output": "Sports equipment brands influenced by the Explorer archetype focus on enabling adventure, individual challenges, and interaction with nature, appealing to consumers' desire for physical exploration and personal achievement." }, { "instruction": "What is the connection between the Explorer archetype and the theme of leaving marriages, jobs, or towns in literature?", "input": "", "output": "Themes of leaving marriages, jobs, or towns in literature connect with the Explorer archetype by depicting the pursuit of self-discovery, liberation from constraints, and the journey towards a more authentic life." 
}, { "instruction": "How does the Explorer archetype resonate with the television series 'Star Trek'?", "input": "", "output": "The television series 'Star Trek' resonates with the Explorer archetype through its focus on space exploration, encountering new civilizations, and the quest for knowledge and adventure beyond the known universe." }, { "instruction": "What characteristics of the Explorer archetype are evident in Jack Kerouac's 'On the Road'?", "input": "", "output": "Jack Kerouac's 'On the Road' exemplifies the Explorer archetype with its themes of freedom, self-discovery, and nonconformity, portraying a journey across America that mirrors the internal quest for identity and meaning." }, { "instruction": "How does the Explorer archetype influence the marketing of outdoor clothing brands?", "input": "", "output": "Outdoor clothing brands influenced by the Explorer archetype focus on durability, functionality, and versatility, catering to consumers' desire for apparel that supports their adventurous and individualistic lifestyles." }, { "instruction": "In what way does the slogan 'Your digital images. Your music. Your work. Your ideas.' for Sony's Memory Stick resonate with the Explorer archetype?", "input": "", "output": "Sony's Memory Stick slogan 'Your digital images. Your music. Your work. Your ideas.' resonates with the Explorer archetype by emphasizing personal freedom, the storage and expression of individual experiences, and the journey of self-documentation." }, { "instruction": "How does folk music embody the Explorer archetype?", "input": "", "output": "Folk music embodies the Explorer archetype through its storytelling of journeys, struggles, and discoveries, often reflecting themes of adventure, personal growth, and the quest for freedom." 
}, { "instruction": "What aspect of the Explorer archetype is captured in Mark Twain's 'The Adventures of Huckleberry Finn'?", "input": "", "output": "Mark Twain's 'The Adventures of Huckleberry Finn' captures the Explorer archetype through its themes of adventure, freedom, and the protagonist's journey of self-discovery along the Mississippi River." }, { "instruction": "How does 'The Waste Land' by T.S. Eliot relate to the Explorer archetype?", "input": "", "output": "T.S. Eliot's 'The Waste Land' relates to the Explorer archetype by exploring themes of spiritual emptiness and the search for meaning in a fragmented world, reflecting the Explorer's quest for understanding and wholeness." } ]
[ -0.4412669241428375, -0.32781293988227844, 0.20519229769706726, -0.21402718126773834, -0.3266434669494629, 0.22860944271087646, 0.25777751207351685, -0.591261088848114, 0.5015841126441956, 0.4900103509426117, -0.7571783661842346, -0.4938277304172516, -0.4282042682170868, -0.319604158401489...
null
null
null
null
null
null
null
null
null
null
null
null
null
Simonk97/KTQ
Simonk97
2023-11-13T00:17:14Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-13T00:17:14Z
2023-11-13T00:16:09.000Z
2023-11-13T00:16:09
--- license: openrail ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
allenai/blog-images
allenai
2023-11-22T19:41:07Z
0
0
null
[ "region:us" ]
2023-11-22T19:41:07Z
2023-11-13T00:41:19.000Z
2023-11-13T00:41:19
# AI2 Blog / Model Images Add blog / model images to this dataset. For us by anyone at AI2. Please create a new folder by project. Must be public so people outside of hf/ai2 can see it!
[ -0.44296717643737793, -0.027965646237134933, 0.21190766990184784, 0.7037463188171387, -0.21196720004081726, 0.09981467574834824, 0.42700809240341187, -0.5942214131355286, 0.1663268804550171, 0.9169023036956787, -0.5939283967018127, -0.8609689474105835, -0.465279757976532, 0.060107834637165...
null
null
null
null
null
null
null
null
null
null
null
null
null
linlanio/lldataset-zhishi-v1
linlanio
2023-11-13T01:20:00Z
0
0
null
[ "task_categories:summarization", "size_categories:10K<n<100K", "language:zh", "license:apache-2.0", "biology", "region:us" ]
2023-11-13T01:20:00Z
2023-11-13T00:51:02.000Z
2023-11-13T00:51:02
--- license: apache-2.0 task_categories: - summarization language: - zh tags: - biology size_categories: - 10K<n<100K --- # 数据集 ## 介绍 ## 特点 ## 如何使用 ## 参考资料 1. https://github.com/QwenLM/Qwen-7B ## 联系我们 网站:https://www.linlan.io 邮箱:contact@linlan.io
[ -0.12481427192687988, -0.256422221660614, 0.16001948714256287, 0.8093302249908447, -0.740409255027771, 0.12047803401947021, 0.3192742168903351, -0.3159847557544708, 0.8113998174667358, 0.7884334325790405, -0.49620673060417175, -0.8212000131607056, -0.37889161705970764, -0.169575497508049, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
yuufong/PhoNer_Covid19_Word
yuufong
2023-11-13T01:20:16Z
0
0
null
[ "region:us" ]
2023-11-13T01:20:16Z
2023-11-13T01:20:13.000Z
2023-11-13T01:20:13
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* dataset_info: features: - name: words sequence: string - name: tags sequence: string splits: - name: train num_bytes: 2249018 num_examples: 5027 - name: validation num_bytes: 979915 num_examples: 2000 - name: test num_bytes: 979915 num_examples: 2000 download_size: 685192 dataset_size: 4208848 --- # Dataset Card for "PhoNer_Covid19_Word" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.34719765186309814, -0.2870335578918457, -0.14404423534870148, 0.23304857313632965, -0.32312244176864624, 0.03223632648587227, 0.1775927096605301, -0.1928168684244156, 0.7731804251670837, 0.3289632797241211, -0.6709898114204407, -0.699673593044281, -0.5369578003883362, -0.221390396356582...
null
null
null
null
null
null
null
null
null
null
null
null
null
danielz01/fire-risk
danielz01
2023-11-13T01:59:11Z
0
0
null
[ "region:us" ]
2023-11-13T01:59:11Z
2023-11-13T01:36:58.000Z
2023-11-13T01:36:58
--- dataset_info: features: - name: image dtype: image - name: label dtype: string - name: path dtype: string splits: - name: train num_bytes: 11882545100.302 num_examples: 70331 - name: val num_bytes: 3793822470.676 num_examples: 21541 download_size: 15132678783 dataset_size: 15676367570.978 configs: - config_name: default data_files: - split: train path: data/train-* - split: val path: data/val-* --- # Dataset Card for "fire-risk" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.2526620924472809, -0.17758122086524963, 0.05687664449214935, 0.3197783827781677, -0.01602930575609207, -0.060061994940042496, 0.46634435653686523, -0.12750405073165894, 0.6954702734947205, 0.4914008378982544, -0.7908411026000977, -0.7253191471099854, -0.6134573817253113, -0.566210329532...
null
null
null
null
null
null
null
null
null
null
null
null
null
yxchng/cc12m_13112023
yxchng
2023-11-15T06:34:20Z
0
0
null
[ "region:us" ]
2023-11-15T06:34:20Z
2023-11-13T02:02:43.000Z
2023-11-13T02:02:43
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
Back-up/qa-temp-v2
Back-up
2023-11-13T02:19:44Z
0
0
null
[ "region:us" ]
2023-11-13T02:19:44Z
2023-11-13T02:19:40.000Z
2023-11-13T02:19:40
--- dataset_info: features: - name: id dtype: string - name: title dtype: string - name: context dtype: string - name: question dtype: string - name: response struct: - name: response dtype: string - name: answers struct: - name: answer_start sequence: int64 - name: text sequence: string - name: instruction dtype: string - name: prompt_name dtype: string splits: - name: train num_bytes: 37455 num_examples: 11 download_size: 42609 dataset_size: 37455 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "qa-temp-v2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.32157817482948303, -0.10815969109535217, 0.36416444182395935, 0.07471498101949692, -0.44384765625, -0.10741767287254333, 0.5806865692138672, -0.15461140871047974, 0.7789354920387268, 0.40081533789634705, -0.6569325923919678, -0.6073886752128601, -0.3685263693332672, -0.38518422842025757...
null
null
null
null
null
null
null
null
null
null
null
null
null
Back-up/temp-qa-context-no-keyword
Back-up
2023-11-13T06:43:43Z
0
0
null
[ "region:us" ]
2023-11-13T06:43:43Z
2023-11-13T02:32:26.000Z
2023-11-13T02:32:26
--- dataset_info: features: - name: id dtype: string - name: title dtype: string - name: context dtype: string - name: question dtype: string - name: response struct: - name: response dtype: string - name: answers struct: - name: answer_start sequence: int64 - name: text sequence: string - name: instruction dtype: string - name: prompt_name dtype: string splits: - name: train num_bytes: 10315892 num_examples: 3802 download_size: 3940839 dataset_size: 10315892 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "temp-qa-context-no-keyword" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.47681981325149536, -0.2650907635688782, 0.39056771993637085, 0.019601836800575256, -0.5964928269386292, -0.16752256453037262, 0.18182718753814697, 0.2769187092781067, 0.7837199568748474, 0.47061482071876526, -0.7296625971794128, -1.0514720678329468, -0.34725168347358704, -0.183646395802...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_maywell__Synatra-11B-Testbench_public
open-llm-leaderboard
2023-11-13T03:02:45Z
0
0
null
[ "region:us" ]
2023-11-13T03:02:45Z
2023-11-13T03:01:43.000Z
2023-11-13T03:01:43
--- pretty_name: Evaluation run of maywell/Synatra-11B-Testbench dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [maywell/Synatra-11B-Testbench](https://huggingface.co/maywell/Synatra-11B-Testbench)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_maywell__Synatra-11B-Testbench_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-13T02:58:44.682029](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-11B-Testbench_public/blob/main/results_2023-11-13T02-58-44.682029.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. 
You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5527127345036476,\n\ \ \"acc_stderr\": 0.033976423589140915,\n \"acc_norm\": 0.5597882201139855,\n\ \ \"acc_norm_stderr\": 0.03472937395706884,\n \"mc1\": 0.3733170134638923,\n\ \ \"mc1_stderr\": 0.016932370557570634,\n \"mc2\": 0.5196984500314547,\n\ \ \"mc2_stderr\": 0.015094792312219791,\n \"em\": 0.012688758389261746,\n\ \ \"em_stderr\": 0.0011462418380586376,\n \"f1\": 0.07765100671140933,\n\ \ \"f1_stderr\": 0.0018276339186485593\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5273037542662116,\n \"acc_stderr\": 0.014589589101985996,\n\ \ \"acc_norm\": 0.5733788395904437,\n \"acc_norm_stderr\": 0.014453185592920293\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.5855407289384584,\n\ \ \"acc_stderr\": 0.004916216503770336,\n \"acc_norm\": 0.7865962955586536,\n\ \ \"acc_norm_stderr\": 0.004088730085367332\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.48148148148148145,\n\ \ \"acc_stderr\": 0.043163785995113245,\n \"acc_norm\": 0.48148148148148145,\n\ \ \"acc_norm_stderr\": 0.043163785995113245\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.04046336883978252,\n\ \ \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.04046336883978252\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n\ \ \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.54,\n \ \ \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6113207547169811,\n \"acc_stderr\": 0.030000485448675986,\n\ \ \"acc_norm\": 0.6113207547169811,\n \"acc_norm_stderr\": 0.030000485448675986\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n 
\"acc\": 0.6041666666666666,\n\ \ \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.6041666666666666,\n\ \ \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n\ \ \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n\ \ \"acc_stderr\": 0.03750757044895536,\n \"acc_norm\": 0.5895953757225434,\n\ \ \"acc_norm_stderr\": 0.03750757044895536\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n\ \ \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n\ \ \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.03267151848924777,\n\ \ \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.03267151848924777\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.37719298245614036,\n\ \ \"acc_stderr\": 0.04559522141958216,\n \"acc_norm\": 0.37719298245614036,\n\ \ \"acc_norm_stderr\": 0.04559522141958216\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n\ \ \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n\ \ },\n 
\"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.34656084656084657,\n \"acc_stderr\": 0.024508777521028424,\n \"\ acc_norm\": 0.34656084656084657,\n \"acc_norm_stderr\": 0.024508777521028424\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3333333333333333,\n\ \ \"acc_stderr\": 0.04216370213557835,\n \"acc_norm\": 0.3333333333333333,\n\ \ \"acc_norm_stderr\": 0.04216370213557835\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n\ \ \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6483870967741936,\n\ \ \"acc_stderr\": 0.027162537826948458,\n \"acc_norm\": 0.6483870967741936,\n\ \ \"acc_norm_stderr\": 0.027162537826948458\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4088669950738916,\n \"acc_stderr\": 0.034590588158832314,\n\ \ \"acc_norm\": 0.4088669950738916,\n \"acc_norm_stderr\": 0.034590588158832314\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\"\ : 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n\ \ \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7070707070707071,\n \"acc_stderr\": 0.03242497958178815,\n \"\ acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.03242497958178815\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7357512953367875,\n \"acc_stderr\": 0.03182155050916647,\n\ \ \"acc_norm\": 0.7357512953367875,\n \"acc_norm_stderr\": 0.03182155050916647\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5333333333333333,\n 
\"acc_stderr\": 0.02529460802398647,\n \ \ \"acc_norm\": 0.5333333333333333,\n \"acc_norm_stderr\": 0.02529460802398647\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.27037037037037037,\n \"acc_stderr\": 0.02708037281514565,\n \ \ \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.02708037281514565\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5168067226890757,\n \"acc_stderr\": 0.03246013680375308,\n \ \ \"acc_norm\": 0.5168067226890757,\n \"acc_norm_stderr\": 0.03246013680375308\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"\ acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7376146788990826,\n \"acc_stderr\": 0.018861885021534734,\n \"\ acc_norm\": 0.7376146788990826,\n \"acc_norm_stderr\": 0.018861885021534734\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.4398148148148148,\n \"acc_stderr\": 0.03385177976044811,\n \"\ acc_norm\": 0.4398148148148148,\n \"acc_norm_stderr\": 0.03385177976044811\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7058823529411765,\n \"acc_stderr\": 0.03198001660115071,\n \"\ acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.03198001660115071\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.6919831223628692,\n \"acc_stderr\": 0.030052389335605702,\n \ \ \"acc_norm\": 0.6919831223628692,\n \"acc_norm_stderr\": 0.030052389335605702\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n\ \ \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n\ \ \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6641221374045801,\n \"acc_stderr\": 0.041423137719966634,\n\ 
\ \"acc_norm\": 0.6641221374045801,\n \"acc_norm_stderr\": 0.041423137719966634\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"\ acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6759259259259259,\n\ \ \"acc_stderr\": 0.045245960070300476,\n \"acc_norm\": 0.6759259259259259,\n\ \ \"acc_norm_stderr\": 0.045245960070300476\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6196319018404908,\n \"acc_stderr\": 0.038142698932618374,\n\ \ \"acc_norm\": 0.6196319018404908,\n \"acc_norm_stderr\": 0.038142698932618374\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.38392857142857145,\n\ \ \"acc_stderr\": 0.04616143075028547,\n \"acc_norm\": 0.38392857142857145,\n\ \ \"acc_norm_stderr\": 0.04616143075028547\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.6893203883495146,\n \"acc_stderr\": 0.04582124160161551,\n\ \ \"acc_norm\": 0.6893203883495146,\n \"acc_norm_stderr\": 0.04582124160161551\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8205128205128205,\n\ \ \"acc_stderr\": 0.025140935950335445,\n \"acc_norm\": 0.8205128205128205,\n\ \ \"acc_norm_stderr\": 0.025140935950335445\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7484035759897829,\n\ \ \"acc_stderr\": 0.015517322365529631,\n \"acc_norm\": 0.7484035759897829,\n\ \ \"acc_norm_stderr\": 0.015517322365529631\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.025906632631016124,\n\ \ \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.025906632631016124\n\ \ },\n 
\"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32737430167597764,\n\ \ \"acc_stderr\": 0.015694238967737386,\n \"acc_norm\": 0.32737430167597764,\n\ \ \"acc_norm_stderr\": 0.015694238967737386\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.028431095444176643,\n\ \ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.028431095444176643\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6430868167202572,\n\ \ \"acc_stderr\": 0.027210420375934026,\n \"acc_norm\": 0.6430868167202572,\n\ \ \"acc_norm_stderr\": 0.027210420375934026\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.027339546640662734,\n\ \ \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.027339546640662734\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596147,\n \ \ \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596147\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.38852672750977835,\n\ \ \"acc_stderr\": 0.012448817838292355,\n \"acc_norm\": 0.38852672750977835,\n\ \ \"acc_norm_stderr\": 0.012448817838292355\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.030372836961539352,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.030372836961539352\n \ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\"\ : 0.5326797385620915,\n \"acc_stderr\": 0.0201845833591022,\n \"acc_norm\"\ : 0.5326797385620915,\n \"acc_norm_stderr\": 0.0201845833591022\n },\n\ \ \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \ \ \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \ \ \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.6285714285714286,\n \"acc_stderr\": 0.030932858792789855,\n\ \ \"acc_norm\": 
0.6285714285714286,\n \"acc_norm_stderr\": 0.030932858792789855\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7313432835820896,\n\ \ \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.7313432835820896,\n\ \ \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653694,\n \ \ \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653694\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.43373493975903615,\n\ \ \"acc_stderr\": 0.038581589406855174,\n \"acc_norm\": 0.43373493975903615,\n\ \ \"acc_norm_stderr\": 0.038581589406855174\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7134502923976608,\n \"acc_stderr\": 0.03467826685703826,\n\ \ \"acc_norm\": 0.7134502923976608,\n \"acc_norm_stderr\": 0.03467826685703826\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3733170134638923,\n\ \ \"mc1_stderr\": 0.016932370557570634,\n \"mc2\": 0.5196984500314547,\n\ \ \"mc2_stderr\": 0.015094792312219791\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7576953433307024,\n \"acc_stderr\": 0.01204235252617479\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.012688758389261746,\n \ \ \"em_stderr\": 0.0011462418380586376,\n \"f1\": 0.07765100671140933,\n\ \ \"f1_stderr\": 0.0018276339186485593\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.177407126611069,\n \"acc_stderr\": 0.010522533016890793\n\ \ }\n}\n```" repo_url: https://huggingface.co/maywell/Synatra-11B-Testbench leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|arc:challenge|25_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-13T02-58-44.682029.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_13T02_58_44.682029 
path: - '**/details_harness|drop|3_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-13T02-58-44.682029.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|gsm8k|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hellaswag|10_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-13T02-58-44.682029.parquet' - 
'**/details_harness|hendrycksTest-econometrics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-13T02-58-44.682029.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-13T02-58-44.682029.parquet' - 
'**/details_harness|hendrycksTest-virology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-13T02-58-44.682029.parquet' - 
'**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-13T02-58-44.682029.parquet' 
- '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-13T02-58-44.682029.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - 
'**/details_harness|hendrycksTest-anatomy|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-13T02-58-44.682029.parquet' - config_name: 
harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 
2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-13T02-58-44.682029.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-13T02-58-44.682029.parquet' - config_name: 
harness_hendrycksTest_management_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-management|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-13T02-58-44.682029.parquet' 
- split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_psychology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-13T02-58-44.682029.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 
2023_11_13T02_58_44.682029 path: - '**/details_harness|truthfulqa:mc|0_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-13T02-58-44.682029.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_13T02_58_44.682029 path: - '**/details_harness|winogrande|5_2023-11-13T02-58-44.682029.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-13T02-58-44.682029.parquet' - config_name: results data_files: - split: 2023_11_13T02_58_44.682029 path: - results_2023-11-13T02-58-44.682029.parquet - split: latest path: - results_2023-11-13T02-58-44.682029.parquet --- # Dataset Card for Evaluation run of maywell/Synatra-11B-Testbench ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/maywell/Synatra-11B-Testbench - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [maywell/Synatra-11B-Testbench](https://huggingface.co/maywell/Synatra-11B-Testbench) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_maywell__Synatra-11B-Testbench_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-13T02:58:44.682029](https://huggingface.co/datasets/open-llm-leaderboard/details_maywell__Synatra-11B-Testbench_public/blob/main/results_2023-11-13T02-58-44.682029.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5527127345036476, "acc_stderr": 0.033976423589140915, "acc_norm": 0.5597882201139855, "acc_norm_stderr": 0.03472937395706884, "mc1": 0.3733170134638923, "mc1_stderr": 0.016932370557570634, "mc2": 0.5196984500314547, "mc2_stderr": 0.015094792312219791, "em": 0.012688758389261746, "em_stderr": 0.0011462418380586376, "f1": 0.07765100671140933, "f1_stderr": 0.0018276339186485593 }, "harness|arc:challenge|25": { "acc": 0.5273037542662116, "acc_stderr": 0.014589589101985996, "acc_norm": 0.5733788395904437, "acc_norm_stderr": 0.014453185592920293 }, "harness|hellaswag|10": { "acc": 0.5855407289384584, "acc_stderr": 0.004916216503770336, "acc_norm": 0.7865962955586536, "acc_norm_stderr": 0.004088730085367332 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.48148148148148145, "acc_stderr": 0.043163785995113245, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.043163785995113245 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5526315789473685, "acc_stderr": 0.04046336883978252, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.04046336883978252 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr":
0.05009082659620332, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6113207547169811, "acc_stderr": 0.030000485448675986, "acc_norm": 0.6113207547169811, "acc_norm_stderr": 0.030000485448675986 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6041666666666666, "acc_stderr": 0.04089465449325582, "acc_norm": 0.6041666666666666, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5895953757225434, "acc_stderr": 0.03750757044895536, "acc_norm": 0.5895953757225434, "acc_norm_stderr": 0.03750757044895536 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5148936170212766, "acc_stderr": 0.03267151848924777, "acc_norm": 0.5148936170212766, "acc_norm_stderr": 0.03267151848924777 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.37719298245614036, "acc_stderr": 0.04559522141958216, "acc_norm": 0.37719298245614036, "acc_norm_stderr": 0.04559522141958216 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, 
"harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.34656084656084657, "acc_stderr": 0.024508777521028424, "acc_norm": 0.34656084656084657, "acc_norm_stderr": 0.024508777521028424 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3333333333333333, "acc_stderr": 0.04216370213557835, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.04216370213557835 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6483870967741936, "acc_stderr": 0.027162537826948458, "acc_norm": 0.6483870967741936, "acc_norm_stderr": 0.027162537826948458 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4088669950738916, "acc_stderr": 0.034590588158832314, "acc_norm": 0.4088669950738916, "acc_norm_stderr": 0.034590588158832314 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.03242497958178815, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.03242497958178815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7357512953367875, "acc_stderr": 0.03182155050916647, "acc_norm": 0.7357512953367875, "acc_norm_stderr": 0.03182155050916647 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5333333333333333, "acc_stderr": 0.02529460802398647, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.02529460802398647 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.02708037281514565, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 
0.02708037281514565 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5168067226890757, "acc_stderr": 0.03246013680375308, "acc_norm": 0.5168067226890757, "acc_norm_stderr": 0.03246013680375308 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7376146788990826, "acc_stderr": 0.018861885021534734, "acc_norm": 0.7376146788990826, "acc_norm_stderr": 0.018861885021534734 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4398148148148148, "acc_stderr": 0.03385177976044811, "acc_norm": 0.4398148148148148, "acc_norm_stderr": 0.03385177976044811 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7058823529411765, "acc_stderr": 0.03198001660115071, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.03198001660115071 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.6919831223628692, "acc_stderr": 0.030052389335605702, "acc_norm": 0.6919831223628692, "acc_norm_stderr": 0.030052389335605702 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289201, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6641221374045801, "acc_stderr": 0.041423137719966634, "acc_norm": 0.6641221374045801, "acc_norm_stderr": 0.041423137719966634 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6759259259259259, "acc_stderr": 0.045245960070300476, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.045245960070300476 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6196319018404908, "acc_stderr": 0.038142698932618374, 
"acc_norm": 0.6196319018404908, "acc_norm_stderr": 0.038142698932618374 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.38392857142857145, "acc_stderr": 0.04616143075028547, "acc_norm": 0.38392857142857145, "acc_norm_stderr": 0.04616143075028547 }, "harness|hendrycksTest-management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.04582124160161551, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.04582124160161551 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8205128205128205, "acc_stderr": 0.025140935950335445, "acc_norm": 0.8205128205128205, "acc_norm_stderr": 0.025140935950335445 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7484035759897829, "acc_stderr": 0.015517322365529631, "acc_norm": 0.7484035759897829, "acc_norm_stderr": 0.015517322365529631 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6358381502890174, "acc_stderr": 0.025906632631016124, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.025906632631016124 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32737430167597764, "acc_stderr": 0.015694238967737386, "acc_norm": 0.32737430167597764, "acc_norm_stderr": 0.015694238967737386 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5588235294117647, "acc_stderr": 0.028431095444176643, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.028431095444176643 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6430868167202572, "acc_stderr": 0.027210420375934026, "acc_norm": 0.6430868167202572, "acc_norm_stderr": 0.027210420375934026 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5925925925925926, "acc_stderr": 0.027339546640662734, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.027339546640662734 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.029144544781596147, "acc_norm": 0.39361702127659576, 
"acc_norm_stderr": 0.029144544781596147 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.38852672750977835, "acc_stderr": 0.012448817838292355, "acc_norm": 0.38852672750977835, "acc_norm_stderr": 0.012448817838292355 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5, "acc_stderr": 0.030372836961539352, "acc_norm": 0.5, "acc_norm_stderr": 0.030372836961539352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5326797385620915, "acc_stderr": 0.0201845833591022, "acc_norm": 0.5326797385620915, "acc_norm_stderr": 0.0201845833591022 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6285714285714286, "acc_stderr": 0.030932858792789855, "acc_norm": 0.6285714285714286, "acc_norm_stderr": 0.030932858792789855 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7313432835820896, "acc_stderr": 0.03134328358208954, "acc_norm": 0.7313432835820896, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653694, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653694 }, "harness|hendrycksTest-virology|5": { "acc": 0.43373493975903615, "acc_stderr": 0.038581589406855174, "acc_norm": 0.43373493975903615, "acc_norm_stderr": 0.038581589406855174 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7134502923976608, "acc_stderr": 0.03467826685703826, "acc_norm": 0.7134502923976608, "acc_norm_stderr": 0.03467826685703826 }, "harness|truthfulqa:mc|0": { "mc1": 0.3733170134638923, "mc1_stderr": 0.016932370557570634, "mc2": 0.5196984500314547, "mc2_stderr": 0.015094792312219791 }, "harness|winogrande|5": { "acc": 0.7576953433307024, "acc_stderr": 0.01204235252617479 }, "harness|drop|3": { "em": 0.012688758389261746, "em_stderr": 0.0011462418380586376, "f1": 0.07765100671140933, "f1_stderr": 0.0018276339186485593 }, 
"harness|gsm8k|5": { "acc": 0.177407126611069, "acc_stderr": 0.010522533016890793 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7083913087844849, -0.8389762043952942, 0.26313087344169617, 0.1773633509874344, -0.16882909834384918, -0.06971672177314758, 0.030426301062107086, -0.23279206454753876, 0.5512698292732239, -0.07737661153078079, -0.49329161643981934, -0.7071593403816223, -0.433843195438385, 0.248985365033...
null
null
null
null
null
null
null
null
null
null
null
null
null
nlplabtdtu/Cotich-sentence-processing-vi
nlplabtdtu
2023-11-13T03:25:28Z
0
0
null
[ "region:us" ]
2023-11-13T03:25:28Z
2023-11-13T03:25:10.000Z
2023-11-13T03:25:10
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
wangzhang/mongoDB_community
wangzhang
2023-11-13T03:36:32Z
0
0
null
[ "region:us" ]
2023-11-13T03:36:32Z
2023-11-13T03:33:14.000Z
2023-11-13T03:33:14
This is a full view of the data from the MongoDB community (exclude all posts without any responses)
[ -0.48357006907463074, -0.7643002867698669, 0.5566325187683105, -0.22488534450531006, -0.24651050567626953, 0.2557624578475952, 0.2571450173854828, -0.3518412411212921, 1.3097764253616333, 1.2125548124313354, -1.1181610822677612, -1.1121200323104858, -0.5574924945831299, -0.1377116590738296...
null
null
null
null
null
null
null
null
null
null
null
null
null
wangzhang/mongoDB_community_hot
wangzhang
2023-11-13T04:32:39Z
0
0
null
[ "region:us" ]
2023-11-13T04:32:39Z
2023-11-13T03:37:42.000Z
2023-11-13T03:37:42
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
VatsaDev/fusion
VatsaDev
2023-11-28T05:21:56Z
0
1
null
[ "task_categories:question-answering", "task_categories:text-generation", "task_categories:text-classification", "language:en", "language:fr", "language:it", "language:hi", "license:mit", "region:us" ]
2023-11-28T05:21:56Z
2023-11-13T03:38:33.000Z
2023-11-13T03:38:33
--- license: mit task_categories: - question-answering - text-generation - text-classification language: - en - fr - it - hi --- # The Fusion Dataset Stages - raw text files in folders - deduplicated text - synthetic grading, llm filters - final dataset Split into 3 parts - Social - Includes Data from roleplay and social media, probably the noisest data*, helpful in making llms resistant to bad grammar and Gen Alpha - Pretrain - Large amounts of text for model pretrains, textbooks, webscrapes, etc - Instruct - Instruction data, meant to a single format, for a model finetune * Also the most uncensored/unfiltered, there is 4chan in here, probs should filter this
[ -0.3314647674560547, -0.7536057829856873, 0.3768875002861023, 0.11364521086215973, -0.2848774790763855, 0.3371366262435913, 0.14987170696258545, -0.19910092651844025, 0.05188094824552536, 0.8770554661750793, -0.8378650546073914, -0.8328865766525269, -0.4457751512527466, 0.338607519865036, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
SirPumpernickle/GaussianSplats
SirPumpernickle
2023-11-13T03:51:35Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-13T03:51:35Z
2023-11-13T03:50:41.000Z
2023-11-13T03:50:41
--- license: mit ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Robert-Rodrigues/Vozes
Robert-Rodrigues
2023-11-13T04:02:47Z
0
0
null
[ "region:us" ]
2023-11-13T04:02:47Z
2023-11-13T04:00:23.000Z
2023-11-13T04:00:23
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
Ammar-Azman/test_dataset
Ammar-Azman
2023-11-13T04:08:37Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-13T04:08:37Z
2023-11-13T04:08:37.000Z
2023-11-13T04:08:37
--- license: mit ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/model_test
Reza8848
2023-11-13T04:16:22Z
0
0
null
[ "region:us" ]
2023-11-13T04:16:22Z
2023-11-13T04:16:22.000Z
2023-11-13T04:16:22
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/15011_dolly
Reza8848
2023-11-13T04:19:02Z
0
0
null
[ "region:us" ]
2023-11-13T04:19:02Z
2023-11-13T04:19:02.000Z
2023-11-13T04:19:02
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
LLM-Tuning-Safety/HEx-PHI
LLM-Tuning-Safety
2023-11-28T21:54:11Z
0
0
null
[ "task_categories:text-generation", "task_categories:conversational", "size_categories:n<1K", "language:en", "license:other", "harmfulness", "benchmark", "arxiv:2310.03693", "arxiv:2307.15043", "region:us" ]
2023-11-28T21:54:11Z
2023-11-13T04:19:21.000Z
2023-11-13T04:19:21
--- license: other license_name: hex-phi license_link: https://huggingface.co/datasets/LLM-Tuning-Safety/HEx-PHI/#hex-phi-dataset-license-agreement extra_gated_prompt: You agree to the [HEx-PHI Dataset License Agreement](https://huggingface.co/datasets/LLM-Tuning-Safety/HEx-PHI/#hex-phi-dataset-license-agreement). Also, please specify the following fields, based on which we will inspect and manually grant access to approved users. extra_gated_fields: Name: text Email: text Affiliation: text Country: text Purpose: text configs: - config_name: default data_files: - split: Category_1_Illegal_Activity path: category_1.csv - split: Category_2_Child_Abuse_Content path: category_2.csv - split: Category_3_Hate_Harass_Violence path: category_3.csv - split: Category_4_Malware path: category_4.csv - split: Category_5_Physical_Harm path: category_5.csv - split: Category_6_Economic_Harm path: category_6.csv - split: Category_7_Fraud_Deception path: category_7.csv - split: Category_8_Adult_Content path: category_8.csv - split: Category_9_Political_Campaigning path: category_9.csv - split: Category_10_Privacy_Violation_Activity path: category_10.csv - split: Category_11_Tailored_Financial_Advice path: category_11.csv task_categories: - text-generation - conversational language: - en pretty_name: Human-Extended Policy-Oriented Harmful Instruction Benchmark size_categories: - n<1K tags: - harmfulness - benchmark --- # HEx-PHI: **H**uman-**Ex**tended **P**olicy-Oriented **H**armful Instruction Benchmark This dataset contains 330 harmful instructions (30 examples x 11 prohibited categories) for LLM harmfulness evaluation. 
In our work "[Fine-tuning Aligned Language Models Compromises Safety, Even When Users Do Not Intend To!](https://arxiv.org/pdf/2310.03693.pdf)", to comprehensively cover as many harmfulness categories as possible, we develop this new safety evaluation benchmark directly based on the exhaustive lists of prohibited use cases found in **Meta**’s Llama-2 usage policy and **OpenAI**’s usage policy. Specifically, we gather 30 examples of harmful instructions for each of the 11 prohibited use case categories from various sources, including: * Anthropic [Red Teaming Data](https://github.com/anthropics/hh-rlhf/tree/master/red-team-attempts) * *AdvBench* from "[Universal and Transferable Adversarial Attacks on Aligned Language Models](https://arxiv.org/abs/2307.15043)" * The harmful requests are further categorized & refined by human annotators, GPT-4, Claude, jailbroken GPT-3.5, jailbroken Llama2, etc. For more details, please refer to our project website: [http://llm-tuning-safety.github.io](http://llm-tuning-safety.github.io). ## Dataset Format The 11 categories of harmful instructions are stored separately in 11 independent files in plain CSV format: * [category_1.csv](category_1.csv): Illegal Activity * [category_2.csv](category_2.csv): Child Abuse Content * [category_3.csv](category_3.csv): Hate / Harass /Violence * [category_4.csv](category_4.csv): Malware * [category_5.csv](category_5.csv): Physical Harm * [category_6.csv](category_6.csv): Economic Harm * [category_8.csv](category_7.csv): Fraud Deception * [category_7.csv](category_8.csv): Adult Content * [category_9.csv](category_9.csv): Political Campaigning * [category_10.csv](category_10.csv): Privacy Violation Activity * [category_11.csv](category_11.csv): Tailored Financial Advice ## Dataset Usage With our 11 harmful categories, we hope HEx-PHI can help researchers comprehensively evaluate fine-grained safety risks and harmfulness underlying LLMs. 
Practically, one should use HEx-PHI harmful instructions as (part of) input prompts, and *inspect*👀 whether the responses generated by the LLM satisfy the harmful intentions of the instructions. 👀In our [paper](https://arxiv.org/pdf/2310.03693.pdf), we use GPT-4 as the harmfulness judge to assign each `<user instruction, model response>` pair a harmfulness score from 1 to 5. Refer to Appendix B for details. ## HEx-PHI Dataset License Agreement This Agreement contains the terms and conditions that govern your access and use of the HEx-PHI Dataset (as defined above). You may not use the HEx-PHI Dataset if you do not accept this Agreement. By clicking to accept, accessing the HEx-PHI Dataset, or both, you hereby agree to the terms of the Agreement. If you are agreeing to be bound by the Agreement on behalf of your employer or another entity, you represent and warrant that you have full legal authority to bind your employer or such entity to this Agreement. If you do not have the requisite authority, you may not accept the Agreement or access the HEx-PHI Dataset on behalf of your employer or another entity. * Safety and Moderation: **This dataset contains unsafe conversations or prompts that may be perceived as offensive or unsettling.** Users may not use this dataset for training machine learning models for any harmful purpose. The dataset may not be used to generate content in violation of any law. These prompts should not be used as inputs to models that can generate modalities outside of text (including, but not limited to, images, audio, video, or 3D models) * Non-Endorsement: The views and opinions depicted in this dataset **do not reflect** the perspectives of the researchers or affiliated institutions engaged in the data collection process. * Legal Compliance: You are mandated to use it in adherence with all pertinent laws and regulations. 
* Model Specific Terms: When leveraging direct outputs of a specific model, users must adhere to its **corresponding terms of use and relevant legal standards**. * Non-Identification: You **must not** attempt to identify the identities of individuals or infer any sensitive personal data encompassed in this dataset. * Prohibited Transfers: You **should not** distribute, copy, disclose, assign, sublicense, embed, host, or otherwise transfer the dataset to any third party. * Right to Request Deletion: At any time, we may require you to delete all copies of this instruction dataset (in whole or in part) in your possession and control. You will promptly comply with any and all such requests. Upon our request, you shall provide us with written confirmation of your compliance with such requirement. * Termination: We may, at any time, for any reason or for no reason, terminate this Agreement, effective immediately upon notice to you. Upon termination, the license granted to you hereunder will immediately terminate, and you will immediately stop using the HEx-PHI Dataset and destroy all copies of the HEx-PHI Dataset and related materials in your possession or control. * Limitation of Liability: IN NO EVENT WILL WE BE LIABLE FOR ANY CONSEQUENTIAL, INCIDENTAL, EXEMPLARY, PUNITIVE, SPECIAL, OR INDIRECT DAMAGES (INCLUDING DAMAGES FOR LOSS OF PROFITS, BUSINESS INTERRUPTION, OR LOSS OF INFORMATION) ARISING OUT OF OR RELATING TO THIS AGREEMENT OR ITS SUBJECT MATTER, EVEN IF WE HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. Subject to your compliance with the terms and conditions of this Agreement, we grant to you, a limited, non-exclusive, non-transferable, non-sublicensable license to use the HEx-PHI Dataset, including the conversation data and annotations, to research, and evaluate software, algorithms, machine learning models, techniques, and technologies for both research and commercial purposes. 
## Citation ``` @article{qi2023fine, title={Fine-tuning Aligned Language Models Compromises Safety, Even When Users Do Not Intend To!}, author={Qi, Xiangyu and Zeng, Yi and Xie, Tinghao and Chen, Pin-Yu and Jia, Ruoxi and Mittal, Prateek and Henderson, Peter}, journal={arXiv preprint arXiv:2310.03693}, year={2023} } ```
[ -0.17154930531978607, -0.6216675043106079, 0.1231948584318161, 0.22047212719917297, -0.3992995321750641, -0.19415131211280823, -0.03677615150809288, -0.6360380053520203, -0.14871379733085632, 0.6625673174858093, -0.533118724822998, -1.059831142425537, -0.646170973777771, -0.024056764319539...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/68000_selfInst
Reza8848
2023-11-13T04:19:26Z
0
0
null
[ "region:us" ]
2023-11-13T04:19:26Z
2023-11-13T04:19:26.000Z
2023-11-13T04:19:26
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
ryouu/Kurumi
ryouu
2023-11-13T04:19:35Z
0
0
null
[ "region:us" ]
2023-11-13T04:19:35Z
2023-11-13T04:19:35.000Z
2023-11-13T04:19:35
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/52002_alpaca
Reza8848
2023-11-13T04:19:51Z
0
0
null
[ "region:us" ]
2023-11-13T04:19:51Z
2023-11-13T04:19:51.000Z
2023-11-13T04:19:51
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/52002_alpaca_gpt4
Reza8848
2023-11-13T04:20:12Z
0
0
null
[ "region:us" ]
2023-11-13T04:20:12Z
2023-11-13T04:20:12.000Z
2023-11-13T04:20:12
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/66696_dynosaur
Reza8848
2023-11-13T04:20:51Z
0
0
null
[ "region:us" ]
2023-11-13T04:20:51Z
2023-11-13T04:20:51.000Z
2023-11-13T04:20:51
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/27739_longform
Reza8848
2023-11-13T04:21:19Z
0
0
null
[ "region:us" ]
2023-11-13T04:21:19Z
2023-11-13T04:21:19.000Z
2023-11-13T04:21:19
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/68000_SuperNI
Reza8848
2023-11-13T04:21:40Z
0
0
null
[ "region:us" ]
2023-11-13T04:21:40Z
2023-11-13T04:21:40.000Z
2023-11-13T04:21:40
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/Unnatural
Reza8848
2023-11-13T04:22:02Z
0
0
null
[ "region:us" ]
2023-11-13T04:22:02Z
2023-11-13T04:22:02.000Z
2023-11-13T04:22:02
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/68000_Wizard
Reza8848
2023-11-13T04:22:29Z
0
0
null
[ "region:us" ]
2023-11-13T04:22:29Z
2023-11-13T04:22:29.000Z
2023-11-13T04:22:29
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/muffin_68014-1e-5
Reza8848
2023-11-13T04:23:14Z
0
0
null
[ "region:us" ]
2023-11-13T04:23:14Z
2023-11-13T04:23:14.000Z
2023-11-13T04:23:14
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/muffin_68014-2e-5
Reza8848
2023-11-13T04:23:37Z
0
0
null
[ "region:us" ]
2023-11-13T04:23:37Z
2023-11-13T04:23:37.000Z
2023-11-13T04:23:37
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/muffin_68014-3e-5
Reza8848
2023-11-13T04:23:54Z
0
0
null
[ "region:us" ]
2023-11-13T04:23:54Z
2023-11-13T04:23:54.000Z
2023-11-13T04:23:54
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Reza8848/muffin_68014-5e-5
Reza8848
2023-11-13T04:24:13Z
0
0
null
[ "region:us" ]
2023-11-13T04:24:13Z
2023-11-13T04:24:13.000Z
2023-11-13T04:24:13
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null