| column | type | details |
|---|---|---|
| id | string | length 2–115 |
| author | string | length 2–42 |
| last_modified | timestamp[us, tz=UTC] | |
| downloads | int64 | 0–8.87M |
| likes | int64 | 0–3.84k |
| paperswithcode_id | string | length 2–45 |
| tags | list | |
| lastModified | timestamp[us, tz=UTC] | |
| createdAt | string | length 24–24 |
| key | string | 1 distinct value |
| created | timestamp[us] | |
| card | string | length 1–1.01M |
| embedding | list | |
| library_name | string | 21 distinct values |
| pipeline_tag | string | 27 distinct values |
| mask_token | null | |
| card_data | null | |
| widget_data | null | |
| model_index | null | |
| config | null | |
| transformers_info | null | |
| spaces | null | |
| safetensors | null | |
| transformersInfo | null | |
| modelId | string | length 5–111 |
| embeddings | list | |

The key column does not appear in the rendered rows below. In every complete record, the thirteen model-only columns (library_name through embeddings) are null; each record abbreviates them as a single "model-only columns: null" line.
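
Since every row below follows this schema, a quick sanity check on such a dump is easiest in pandas; a minimal sketch, assuming the dump has been exported to a local Parquet file (the filename below is hypothetical):

```python
import pandas as pd

# Hypothetical local export of the metadata dump described by the schema above.
df = pd.read_parquet("dataset_metadata.parquet")

# Confirm the column types: strings, two int64 counters, timestamp-like fields,
# list-valued tags/embedding columns, and all-null model-only columns.
print(df.dtypes)

# Rows whose repo has no card store the literal string "Entry not found".
print((df["card"] == "Entry not found").sum(), "rows have no dataset card")
```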

id: tyzhu/find_last_sent_train_50_eval_40
author: tyzhu | last_modified: 2023-11-08T06:31:40Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T06:31:40Z | createdAt: 2023-11-08T06:29:41.000Z | created: 2023-11-08T06:29:41
card:
  --- configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* dataset_info: features: - name: inputs dtype: string - name: targets dtype: string - name: title dtype: string - name: context dtype: string splits: - name: train num_bytes: 180593 num_examples: 140 - name: validation num_bytes: 39604 num_examples: 40 download_size: 0 dataset_size: 220197 ---
  # Dataset Card for "find_last_sent_train_50_eval_40"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.4636705815792084, -0.053982146084308624, 0.39901989698410034, 0.28286927938461304, -0.09107440710067749, 0.10144593566656113, 0.13625292479991913, 0.2238321751356125, 0.6329540610313416, 0.39440539479255676, -0.8158881664276123, -0.7473084330558777, -0.4267062544822693, -0.1695759445428...
model-only columns: null
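
The card above declares a default config whose data_files map to train and validation splits, so each split can be requested directly; a minimal sketch, assuming the repository is still accessible on the Hub:

```python
from datasets import load_dataset

# The validation split should contain 40 examples per the card's dataset_info.
ds = load_dataset("tyzhu/find_last_sent_train_50_eval_40", split="validation")
print(ds)                      # features: inputs, targets, title, context
print(ds[0]["inputs"][:100])   # peek at the first example
```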

id: linxin020826/dehazing_test
author: linxin020826 | last_modified: 2023-11-08T06:47:38Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T06:47:38Z | createdAt: 2023-11-08T06:37:21.000Z | created: 2023-11-08T06:37:21
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: aigeek0x0/alpaca-gpt4-1000
author: aigeek0x0 | last_modified: 2023-11-15T00:37:55Z | downloads: 0 | likes: 1 | paperswithcode_id: null
tags: [ "license:cc-by-nc-4.0", "region:us" ] | lastModified: 2023-11-15T00:37:55Z | createdAt: 2023-11-08T07:11:45.000Z | created: 2023-11-08T07:11:45
card:
  --- license: cc-by-nc-4.0 ---
  The dataset consists of 1000 randomly selected rows from the original [alpaca-gpt4](https://huggingface.co/datasets/vicgalle/alpaca-gpt4) dataset.
embedding:
[ -0.8627439737319946, -0.7070332169532776, 0.4449124038219452, 0.2871515154838562, -0.20424368977546692, -0.43649667501449585, 0.5413141846656799, -0.2858302593231201, 1.1002793312072754, 0.9063655138015747, -0.9082609415054321, -0.28802430629730225, -0.29344967007637024, 0.1374115347862243...
model-only columns: null

id: KETI-AIR/kor_anli
author: KETI-AIR | last_modified: 2023-11-15T01:18:06Z | downloads: 0 | likes: 0 | paperswithcode_id: anli
tags: [ "task_categories:text-classification", "task_ids:natural-language-inference", "task_ids:multi-input-text-classification", "size_categories:100K<n<1M", "language:ko", "license:cc-by-nc-4.0", "region:us" ] | lastModified: 2023-11-15T01:18:06Z | createdAt: 2023-11-08T07:39:41.000Z | created: 2023-11-08T07:39:41
card:
  --- language: - ko license: - cc-by-nc-4.0 size_categories: - 100K<n<1M task_categories: - text-classification task_ids: - natural-language-inference - multi-input-text-classification paperswithcode_id: anli pretty_name: Adversarial NLI dataset_info: features: - name: data_index_by_user dtype: int32 - name: premise dtype: string - name: hypothesis dtype: string - name: label dtype: class_label: names: '0': entailment '1': neutral '2': contradiction - name: reason dtype: string splits: - name: train_r1 num_bytes: 8505556 num_examples: 16946 - name: train_r2 num_bytes: 22521662 num_examples: 45460 - name: train_r3 num_bytes: 48605206 num_examples: 100459 - name: dev_r1 num_bytes: 628891 num_examples: 1000 - name: dev_r2 num_bytes: 613763 num_examples: 1000 - name: dev_r3 num_bytes: 740840 num_examples: 1200 - name: test_r1 num_bytes: 626555 num_examples: 1000 - name: test_r2 num_bytes: 633241 num_examples: 1000 - name: test_r3 num_bytes: 736887 num_examples: 1200 download_size: 23386318 dataset_size: 83612601 ---
  # Dataset Card for anli
  ## Licensing Information
  [CC Attribution-NonCommercial 4.0](https://github.com/facebookresearch/anli/blob/main/LICENSE)
  ## Source Data Citation Information
  ```
  @InProceedings{nie2019adversarial,
    title={Adversarial NLI: A New Benchmark for Natural Language Understanding},
    author={Nie, Yixin and Williams, Adina and Dinan, Emily and Bansal, Mohit and Weston, Jason and Kiela, Douwe},
    booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
    year = "2020",
    publisher = "Association for Computational Linguistics",
  }
  ```
embedding:
[ -0.27917343378067017, -0.8347379565238953, 0.1181754618883133, 0.20840021967887878, 0.04289272800087929, 0.060922227799892426, -0.24956457316875458, -0.42447125911712646, 0.09586293995380402, 0.3098617494106293, -0.6741060018539429, -0.5948660373687744, -0.557064950466156, 0.32905715703964...
model-only columns: null
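
The card above defines per-round splits (train_r1 through test_r3) and a three-way class_label, which the datasets library can decode back to strings; a minimal sketch, assuming the repository loads with its default config:

```python
from datasets import load_dataset

ds = load_dataset("KETI-AIR/kor_anli", split="train_r1")
label_names = ds.features["label"].names  # ['entailment', 'neutral', 'contradiction']
ex = ds[0]
print(ex["premise"])
print(ex["hypothesis"], "->", label_names[ex["label"]])
```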

id: Nexdata/Non-safety_and_inductive_Prompt_data
author: Nexdata | last_modified: 2023-11-22T09:37:42Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:text-generation", "task_categories:zero-shot-classification", "language:zh", "region:us" ] | lastModified: 2023-11-22T09:37:42Z | createdAt: 2023-11-08T08:02:36.000Z | created: 2023-11-08T08:02:36
card:
  --- task_categories: - text-generation - zero-shot-classification language: - zh ---
  # Dataset Card for Nexdata/Non-safety_and_inductive_Prompt_data
  ## Description
  Non-safety and inductive prompt data, about 500,000 items in total. This dataset can be used for tasks such as LLM training and ChatGPT-style assistants.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1349?source=Huggingface
  # Specifications
  - Data content: non-safety and inductive prompt data
  - Data size: about 500,000
  - Collecting type: violence, politically sensitive, bad language, porn, inducement
  - Collecting method: written by professional annotators
  - Storage format: txt
  - Language: Chinese
  # Licensing Information
  Commercial License
embedding:
[ -0.5084230899810791, -0.6425987482070923, -0.0484524741768837, 0.47560325264930725, -0.27764004468917847, -0.3342131972312927, 0.07448650896549225, -0.26676368713378906, 0.3685642182826996, 0.5287967920303345, -0.752196192741394, -0.8015486001968384, -0.34048327803611755, 0.190940439701080...
model-only columns: null

id: Nexdata/Unsupervised_Text_Data_For_Literary_Subjects
author: Nexdata | last_modified: 2023-11-10T07:21:54Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:conversational", "language:zh", "region:us" ] | lastModified: 2023-11-10T07:21:54Z | createdAt: 2023-11-08T08:05:07.000Z | created: 2023-11-08T08:05:07
card:
  --- task_categories: - conversational language: - zh ---
  # Dataset Card for Nexdata/Unsupervised_Text_Data_For_Literary_Subjects
  ## Description
  Subject-matter content data, about 1 TB in total; each piece of content contains title, text, author, date, subject, and keyword fields. This dataset can be used for tasks such as LLM training and ChatGPT-style assistants.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1310?source=Huggingface
  # Specifications
  - Data content: news content data, about 79 subjects
  - Data size: about 1 TB
  - Data fields: text data with title, text, author, date, subject, keyword
  - Collection method: using keywords to retrieve data from massive databases, where the keywords are the subject and keyword fields
  - Storage format: json
  - Language: Chinese
  # Licensing Information
  Commercial License
embedding:
[ -0.18958882987499237, -0.5649231672286987, 0.25833022594451904, 0.3336004614830017, -0.45565715432167053, -0.1582212895154953, -0.1730203777551651, -0.015924960374832153, 0.41572675108909607, 0.9916139841079712, -0.6327949166297913, -0.9969897866249084, -0.4722239375114441, 0.0695911273360...
model-only columns: null

id: Nexdata/Multi-Round_Interpersonal_Dialogues_Text_Data
author: Nexdata | last_modified: 2023-11-10T07:19:51Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "language:zh", "region:us" ] | lastModified: 2023-11-10T07:19:51Z | createdAt: 2023-11-08T08:08:11.000Z | created: 2023-11-08T08:08:11
card:
  --- language: - zh ---
  # Dataset Card for Nexdata/Multi-Round_Interpersonal_Dialogues_Text_Data
  ## Description
  This database is an interactive text corpus of real users on mobile phones. It has been desensitized to ensure it contains no private user information (A and B are codes replacing the sender and receiver, and sensitive information such as cellphone numbers and user names is replaced with '***'). This database can be used for tasks such as natural language understanding.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/150?source=Huggingface
  # Specifications
  - Data content: text corpus of multi-round interpersonal dialogues in the real world
  - Data size: 830,276 groups
  - Collecting period: the year 2015
  - Storage format: txt
  - Language: Chinese
  - Applications: semantic parsing of multi-round dialogues in smart customer service and intelligent interaction scenarios
  # Licensing Information
  Commercial License
embedding:
[ -0.5557376742362976, -1.0586371421813965, 0.1111125573515892, 0.6112616062164307, -0.10395451635122299, -0.004649848211556673, -0.1408703476190567, -0.4683052599430084, 0.5297184586524963, 0.5732030272483826, -0.7734461426734924, -0.6460506319999695, -0.21635273098945618, 0.224368020892143...
model-only columns: null

id: Nexdata/Chinese_Medical_Question_Answering_Data
author: Nexdata | last_modified: 2023-11-10T07:18:08Z | downloads: 0 | likes: 3 | paperswithcode_id: null
tags: [ "task_categories:question-answering", "language:zh", "region:us" ] | lastModified: 2023-11-10T07:18:08Z | createdAt: 2023-11-08T08:10:28.000Z | created: 2023-11-08T08:10:28
card:
  --- task_categories: - question-answering language: - zh ---
  # Dataset Card for Nexdata/Chinese_Medical_Question_Answering_Data
  ## Description
  The data contains 203,029 groups of Chinese question-answering data between doctors and patients across different diseases.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1086?source=Huggingface
  # Specifications
  - Data content: 203,029 Chinese question & answer pairs between doctors and patients
  - Storage format: json
  - Language: Chinese
  - Collection time: May 2020
  - Application scenario: intelligent medical diagnosis; question answering
  # Licensing Information
  Commercial License
embedding:
[ -0.37287744879722595, -0.7882391214370728, 0.4231880009174347, 0.3217333257198334, -0.020753979682922363, -0.282550185918808, 0.24348269402980804, -0.484256774187088, 0.6140000224113464, 0.6517589092254639, -0.7783190608024597, -0.7699757218360901, -0.42706263065338135, 0.15121254324913025...
model-only columns: null

id: Nexdata/Minnan_Dialect_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:17:04Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "region:us" ] | lastModified: 2023-11-10T07:17:04Z | createdAt: 2023-11-08T08:11:55.000Z | created: 2023-11-08T08:11:55
card:
  --- task_categories: - automatic-speech-recognition ---
  # Dataset Card for Nexdata/Minnan_Dialect_Pronunciation_Dictionary
  ## Description
  Each entry consists of three parts: word, pinyin, and tone. The dictionary can be used to provide pronunciation reference for sound recording personnel, for research and development of pronunciation recognition technology, etc.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/51?source=Huggingface
  # Specifications
  - Format: txt
  - Content: 87,166 Minnan dialect words and corresponding phonetic symbols
  - Language: Minnan dialect
  - Application scenario: speech recognition
  # Licensing Information
  Commercial License
embedding:
[ -0.5946226716041565, -0.42012810707092285, 0.17083704471588135, 0.3872952163219452, -0.47650423645973206, -0.428372859954834, -0.1278214007616043, -0.2803901731967926, 0.9025518894195557, 0.7129786610603333, -0.5906749963760376, -0.7729912400245667, -0.18519563972949982, 0.4938161969184875...
model-only columns: null

id: Nexdata/Cantonese_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:17:36Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "region:us" ] | lastModified: 2023-11-10T07:17:36Z | createdAt: 2023-11-08T08:13:30.000Z | created: 2023-11-08T08:13:30
card:
  --- task_categories: - automatic-speech-recognition ---
  # Dataset Card for Nexdata/Cantonese_Pronunciation_Dictionary
  ## Description
  This pronunciation dictionary collects words with dialect characteristics from Cantonese-speaking regions of Guangdong. Each entry consists of three parts: word, pinyin, and tone. The dictionary can be used to provide pronunciation reference for sound recording personnel, for research and development of pronunciation recognition technology, etc.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/55?source=Huggingface
  # Specifications
  - Data size: 80,279 entries
  - Content: Cantonese vocabulary and pronunciations
  # Licensing Information
  Commercial License
embedding:
[ -0.4319288730621338, -0.4251648485660553, 0.04208402708172798, 0.5710606575012207, -0.23806564509868622, -0.45514944195747375, 0.17045855522155762, -0.2144360989332199, 0.7350354790687561, 0.7110684514045715, -0.4735703766345978, -0.647758424282074, -0.2229524403810501, 0.28044813871383667...
model-only columns: null

id: Nexdata/Mandarin_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:16:45Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "language:zh", "region:us" ] | lastModified: 2023-11-10T07:16:45Z | createdAt: 2023-11-08T08:14:47.000Z | created: 2023-11-08T08:14:47
card:
  --- task_categories: - automatic-speech-recognition language: - zh ---
  # Dataset Card for Nexdata/Mandarin_Pronunciation_Dictionary
  ## Description
  The data contains 570,060 entries. All words and pronunciations are produced by linguists. It can be used in the research and development of Mandarin ASR technology.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1094?source=Huggingface
  # Specifications
  - Format: TXT
  - Data content: 570,060 Mandarin words and corresponding pinyin
  - Language: Mandarin
  - Application scenario: speech recognition
  # Licensing Information
  Commercial License
embedding:
[ -0.3149147033691406, -0.27657297253608704, -0.19719427824020386, 0.42900022864341736, -0.06993706524372101, -0.30304205417633057, -0.18508145213127136, -0.29012733697891235, 0.4014412760734558, 0.5408499836921692, -0.3799806535243988, -0.6260371804237366, -0.2353665679693222, 0.32223105430...
model-only columns: null
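
The pronunciation-dictionary records above all describe txt files whose entries pair words with pinyin and tones, but none documents the exact line layout; a minimal parsing sketch, assuming whitespace-separated fields (an assumption, not a documented format):

```python
def load_lexicon(path):
    """Map each word to its full pronunciation string (pinyin plus tone)."""
    lexicon = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            parts = line.strip().split()
            if len(parts) >= 2:                      # skip blank/malformed lines
                lexicon[parts[0]] = " ".join(parts[1:])
    return lexicon
```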

id: k8tems/splats
author: k8tems | last_modified: 2023-11-08T08:16:41Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T08:16:41Z | createdAt: 2023-11-08T08:16:05.000Z | created: 2023-11-08T08:16:05
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: Nexdata/Japanese_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:16:29Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "language:ja", "region:us" ] | lastModified: 2023-11-10T07:16:29Z | createdAt: 2023-11-08T08:16:14.000Z | created: 2023-11-08T08:16:14
card:
  --- task_categories: - automatic-speech-recognition language: - ja ---
  # Dataset Card for Nexdata/Japanese_Pronunciation_Dictionary
  ## Description
  The data contains 101,702 entries. All words and pronunciations are produced by Japanese linguists. It can be used in the research and development of Japanese ASR technology.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1088?source=Huggingface
  # Specifications
  - Format: TXT
  - Data content: 101,702 Japanese words and corresponding hiragana characters
  - Language: Japanese
  - Application scenario: speech recognition
  # Licensing Information
  Commercial License
embedding:
[ -0.5960499048233032, -0.47216665744781494, 0.18507647514343262, 0.23875382542610168, -0.022891134023666382, -0.20807980000972748, -0.12255805730819702, -0.3261622190475464, 0.5558441877365112, 0.7143965363502502, -0.6298812031745911, -0.7219082117080688, -0.5036940574645996, 0.387434244155...
model-only columns: null

id: Nexdata/English_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:16:06Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "language:en", "region:us" ] | lastModified: 2023-11-10T07:16:06Z | createdAt: 2023-11-08T08:17:32.000Z | created: 2023-11-08T08:17:32
card:
  --- task_categories: - automatic-speech-recognition language: - en ---
  # Dataset Card for Nexdata/English_Pronunciation_Dictionary
  ## Description
  The data contains 500,113 entries. All words and pronunciations are produced by English linguists. It can be used in the research and development of English ASR technology.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1095?source=Huggingface
  # Specifications
  - Format: txt
  - Data content: 500,113 English words and corresponding phonetic transcriptions
  - Language: English
  - Application scenario: speech recognition
  # Licensing Information
  Commercial License
embedding:
[ -0.5344926714897156, -0.31909099221229553, -0.04408960044384003, 0.3471442461013794, 0.12374008446931839, -0.17424780130386353, -0.11194409430027008, -0.38298967480659485, 0.6051002144813538, 0.5158416032791138, -0.4468185007572174, -0.6438552141189575, -0.48819422721862793, 0.476295590400...
model-only columns: null

id: Nexdata/Korean_Pronunciation_Dictionary
author: Nexdata | last_modified: 2023-11-10T07:16:18Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:automatic-speech-recognition", "language:ko", "region:us" ] | lastModified: 2023-11-10T07:16:18Z | createdAt: 2023-11-08T08:18:47.000Z | created: 2023-11-08T08:18:47
card:
  --- task_categories: - automatic-speech-recognition language: - ko ---
  # Dataset Card for Nexdata/Korean_Pronunciation_Dictionary
  ## Description
  The data contains 444,202 entries. All words and pronunciations are produced by Korean linguists. It can be used in the research and development of Korean ASR technology.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1221?source=Huggingface
  # Specifications
  - Format: txt
  - Content: 444,202 Korean words and corresponding phonetic symbols
  - Language: Korean
  - Application scenarios: speech recognition, speech synthesis
  # Licensing Information
  Commercial License
embedding:
[ -0.479617714881897, -0.13003137707710266, 0.13260327279567719, 0.5196365118026733, -0.03472582995891571, -0.008845601230859756, -0.11018628627061844, -0.2783331871032715, 0.5127196311950684, 0.7008427381515503, -0.6575319766998291, -0.837530255317688, -0.420219361782074, 0.4293049275875091...
model-only columns: null

id: nlplabtdtu/wikihow-processes-vi
author: nlplabtdtu | last_modified: 2023-11-08T08:53:12Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T08:53:12Z | createdAt: 2023-11-08T08:47:45.000Z | created: 2023-11-08T08:47:45
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: yuancj/character-profiles-romance-prepare
author: yuancj | last_modified: 2023-11-08T08:56:34Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T08:56:34Z | createdAt: 2023-11-08T08:54:21.000Z | created: 2023-11-08T08:54:21
card:
  --- dataset_info: features: [] splits: - name: train num_bytes: 0 num_examples: 0 download_size: 0 dataset_size: 0 configs: - config_name: default data_files: - split: train path: data/train-* ---
  # Dataset Card for "character-profiles-romance-prepare"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.5061955451965332, -0.39656758308410645, 0.25191330909729004, 0.488713800907135, 0.09980081766843796, 0.022285880520939827, 0.2858847677707672, -0.03535621985793114, 1.0202980041503906, 0.7108550071716309, -1.051865577697754, -0.7663849592208862, -0.18095582723617554, -0.0215177871286869...
model-only columns: null

id: fate7/weather4cast
author: fate7 | last_modified: 2023-11-08T09:10:33Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:apache-2.0", "region:us" ] | lastModified: 2023-11-08T09:10:33Z | createdAt: 2023-11-08T09:10:33.000Z | created: 2023-11-08T09:10:33
card: --- license: apache-2.0 ---
embedding:
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
model-only columns: null

id: mhenrichsen/creator
author: mhenrichsen | last_modified: 2023-11-08T09:20:26Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T09:20:26Z | createdAt: 2023-11-08T09:20:23.000Z | created: 2023-11-08T09:20:23
card:
  --- dataset_info: features: - name: id dtype: int64 - name: conversations list: - name: from dtype: string - name: value dtype: string splits: - name: train num_bytes: 375555 num_examples: 1000 download_size: 11378 dataset_size: 375555 configs: - config_name: default data_files: - split: train path: data/train-* ---
  # Dataset Card for "creator"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.588806688785553, -0.35239022970199585, 0.2331690639257431, 0.2412286251783371, -0.15815867483615875, 0.10401222854852676, 0.1708575189113617, -0.15405461192131042, 0.9312933087348938, 0.5600684881210327, -0.8718087673187256, -0.5909435153007507, -0.7145982980728149, -0.15810388326644897...
model-only columns: null
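
The card above gives each example an id plus a conversations list whose elements carry from and value fields; a minimal sketch of walking one conversation, assuming the repository loads:

```python
from datasets import load_dataset

ds = load_dataset("mhenrichsen/creator", split="train")
for turn in ds[0]["conversations"]:        # list of {"from": ..., "value": ...}
    print(f'{turn["from"]}: {turn["value"][:80]}')
```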

id: elzooz/Amod_mental_health_counseling_conversations
author: elzooz | last_modified: 2023-11-08T09:27:51Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:openrail", "region:us" ] | lastModified: 2023-11-08T09:27:51Z | createdAt: 2023-11-08T09:27:51.000Z | created: 2023-11-08T09:27:51
card: --- license: openrail ---
embedding:
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
model-only columns: null

id: baixue6269/character-profiles-romance-output
author: baixue6269 | last_modified: 2023-11-08T10:00:39Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T10:00:39Z | createdAt: 2023-11-08T09:28:13.000Z | created: 2023-11-08T09:28:13
card:
  --- dataset_info: features: - name: name dtype: string - name: categories sequence: string - name: personalities sequence: string - name: description dtype: string - name: conversation list: - name: content dtype: string - name: role dtype: string splits: - name: train num_bytes: 103050 num_examples: 10 download_size: 67798 dataset_size: 103050 configs: - config_name: default data_files: - split: train path: data/train-* ---
  # Dataset Card for "character-profiles-romance-output"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.45209038257598877, -0.387479305267334, 0.23499053716659546, 0.2828691303730011, -0.013331077992916107, 0.02942688949406147, 0.16504186391830444, -0.053221505135297775, 1.1118868589401245, 0.7320383787155151, -1.1390290260314941, -0.9179519414901733, -0.3474879860877991, -0.1302829235792...
model-only columns: null

id: yuancj/character-profiles-romance-prepare2
author: yuancj | last_modified: 2023-11-08T11:25:53Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T11:25:53Z | createdAt: 2023-11-08T09:34:07.000Z | created: 2023-11-08T09:34:07
card:
  --- dataset_info: features: - name: name dtype: string - name: categories sequence: string - name: personalities sequence: string - name: description dtype: string - name: conversation list: - name: content dtype: string - name: role dtype: string splits: - name: train num_bytes: 20485 num_examples: 2 download_size: 30036 dataset_size: 20485 configs: - config_name: default data_files: - split: train path: data/train-* ---
  # Dataset Card for "character-profiles-romance-prepare2"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.38730111718177795, -0.3518264591693878, 0.2578680217266083, 0.5174130797386169, 0.026587575674057007, -0.020008591935038567, 0.32584312558174133, -0.1085728257894516, 0.8953464031219482, 0.6888488531112671, -0.9588369727134705, -0.6355290412902832, -0.2959051728248596, -0.05939191579818...
model-only columns: null

id: ianjayyuson/solo_pic
author: ianjayyuson | last_modified: 2023-11-08T20:45:15Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T20:45:15Z | createdAt: 2023-11-08T09:51:35.000Z | created: 2023-11-08T09:51:35
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: albertvillanova/wiki
author: albertvillanova | last_modified: 2023-11-08T09:56:09Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T09:56:09Z | createdAt: 2023-11-08T09:56:09.000Z | created: 2023-11-08T09:56:09
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: CarrotzRule123/crawl-apu.edu
author: CarrotzRule123 | last_modified: 2023-11-08T09:58:19Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T09:58:19Z | createdAt: 2023-11-08T09:56:10.000Z | created: 2023-11-08T09:56:10
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: arieg/bw_spec_cls_80
author: arieg | last_modified: 2023-11-08T09:58:53Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T09:58:53Z | createdAt: 2023-11-08T09:58:43.000Z | created: 2023-11-08T09:58:43
card:
  --- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '10' '1': '1039' '2': '1040' '3': '1082' '4': '1083' '5': '1102' '6': '1193' '7': '1195' '8': '1196' '9': '1197' '10': '1270' '11': '1276' '12': '1277' '13': '1278' '14': '140' '15': '141' '16': '1417' '17': '1427' '18': '1443' '19': '1482' '20': '1510' '21': '1544' '22': '1642' '23': '1644' '24': '1649' '25': '1661' '26': '1663' '27': '1666' '28': '1673' '29': '1680' '30': '1681' '31': '1682' '32': '1683' '33': '1684' '34': '1685' '35': '190' '36': '193' '37': '194' '38': '197' '39': '2' '40': '200' '41': '203' '42': '204' '43': '207' '44': '210' '45': '211' '46': '212' '47': '213' '48': '255' '49': '256' '50': '368' '51': '424' '52': '5' '53': '534' '54': '540' '55': '546' '56': '574' '57': '615' '58': '620' '59': '621' '60': '625' '61': '666' '62': '667' '63': '676' '64': '694' '65': '695' '66': '714' '67': '715' '68': '716' '69': '718' '70': '777' '71': '814' '72': '821' '73': '822' '74': '825' '75': '853' '76': '897' '77': '995' '78': '997' '79': '998' splits: - name: train num_bytes: 89716674.4 num_examples: 1600 download_size: 87975685 dataset_size: 89716674.4 ---
  # Dataset Card for "bw_spec_cls_80"
  [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
embedding:
[ -0.7281789183616638, -0.1844707578420639, 0.1933465152978897, 0.2655416429042816, -0.2622053921222687, -0.16420036554336548, 0.0011206622002646327, -0.2817683517932892, 0.56434166431427, 0.5333073735237122, -0.7624777555465698, -0.8276515603065491, -0.5609703063964844, -0.18455566465854645...
model-only columns: null
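
The card above maps 80 integer label indices to string class names, which datasets exposes through the ClassLabel feature; a minimal decoding sketch, assuming the repository loads:

```python
from datasets import load_dataset

ds = load_dataset("arieg/bw_spec_cls_80", split="train")
label_feature = ds.features["label"]
print(label_feature.num_classes)               # 80
print(label_feature.int2str(ds[0]["label"]))   # e.g. '10'
```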

id: AdamMashaka/MCV
author: AdamMashaka | last_modified: 2023-11-08T09:59:05Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:apache-2.0", "region:us" ] | lastModified: 2023-11-08T09:59:05Z | createdAt: 2023-11-08T09:59:05.000Z | created: 2023-11-08T09:59:05
card: --- license: apache-2.0 ---
embedding:
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
model-only columns: null

id: faizalnf1800/bangs_under_sidepart_hairstyle_anime_woman
author: faizalnf1800 | last_modified: 2023-11-08T09:59:42Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T09:59:42Z | createdAt: 2023-11-08T09:59:42.000Z | created: 2023-11-08T09:59:42
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: ldp72/test2
author: ldp72 | last_modified: 2023-11-08T10:03:13Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T10:03:13Z | createdAt: 2023-11-08T10:03:13.000Z | created: 2023-11-08T10:03:13
card: Entry not found
embedding:
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
model-only columns: null

id: Nexdata/English-Japanese_Parallel_Corpus_Data
author: Nexdata | last_modified: 2023-11-10T07:15:01Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:translation", "language:en", "language:ja", "region:us" ] | lastModified: 2023-11-10T07:15:01Z | createdAt: 2023-11-08T10:47:40.000Z | created: 2023-11-08T10:47:40
card:
  --- task_categories: - translation language: - en - ja ---
  # Dataset Card for Nexdata/English-Japanese_Parallel_Corpus_Data
  ## Description
  The 850,000-pair English-Japanese parallel corpus is bilingual text stored in text format. It covers multiple fields such as tourism, medical treatment, daily life, and news; English sentences average 23 words. Data desensitization and quality checking have been done. It can be used as a basic corpus for text data analysis in fields such as machine translation.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1186?source=Huggingface
  # Specifications
  - Storage format: TXT
  - Data content: English-Japanese parallel corpus data
  - Data size: 0.85 million pairs of English-Japanese parallel corpus data; the English sentences contain 23 words on average
  - Language: English, Japanese
  - Accuracy rate: 90%
  - Application scenario: machine translation
  # Licensing Information
  Commercial License
embedding:
[ -0.5336967706680298, -0.7732139825820923, 0.2942643463611603, 0.47255244851112366, -0.22684618830680847, -0.03784451261162758, -0.28727179765701294, -0.3985368311405182, 0.6121654510498047, 0.5709323287010193, -0.39591801166534424, -0.8885242938995361, -0.6680867671966553, 0.61915260553359...
model-only columns: null

id: Nexdata/Japanese-English_Parallel_Corpus_Data
author: Nexdata | last_modified: 2023-11-10T07:13:07Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:translation", "language:ja", "region:us" ] | lastModified: 2023-11-10T07:13:07Z | createdAt: 2023-11-08T10:50:47.000Z | created: 2023-11-08T10:50:47
card:
  --- task_categories: - translation language: - ja ---
  # Dataset Card for Nexdata/Japanese-English_Parallel_Corpus_Data
  ## Description
  Japanese and English parallel corpus, 380,000 groups in total; political, pornographic, personal and other sensitive vocabulary has been excluded. It can serve as a base corpus for text-based data analysis, used in machine translation and other fields.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/153?source=Huggingface
  # Specifications
  - Storage format: TXT
  - Data content: Japanese-English parallel corpus data
  - Data size: 0.38 million pairs of Japanese-English parallel corpus data
  - Language: Japanese, English
  - Application scenario: machine translation
  # Licensing Information
  Commercial License
embedding:
[ -0.6740300059318542, -0.6633209586143494, 0.22113542258739471, 0.5079109072685242, -0.10094387829303741, 0.10973440855741501, -0.2096778303384781, -0.3438951075077057, 0.6777833104133606, 0.6587461233139038, -0.5216996073722839, -0.9483186602592468, -0.7899399995803833, 0.39067569375038147...
model-only columns: null

id: Nexdata/Traditional_Chinese_Oral_Message_Data
author: Nexdata | last_modified: 2023-11-10T07:12:42Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:conversational", "language:zh", "region:us" ] | lastModified: 2023-11-10T07:12:42Z | createdAt: 2023-11-08T10:53:38.000Z | created: 2023-11-08T10:53:38
card:
  --- task_categories: - conversational language: - zh ---
  # Dataset Card for Nexdata/Traditional_Chinese_Oral_Message_Data
  ## Description
  Traditional Chinese SMS corpus, 10 million messages in total, consisting of real traditional Chinese spoken-language text data. It contains only text messages; the content is stored in txt format. The dataset can be used for natural language understanding and related tasks.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/182?source=Huggingface
  # Specifications
  - Data content: traditional Chinese SMS corpus text data
  - Data size: 10 million
  - Collecting period: the year 2014
  - Storage format: txt
  - Language: Chinese
  # Licensing Information
  Commercial License
embedding:
[ -0.26548731327056885, -0.8830503821372986, 0.08002397418022156, 0.6956434845924377, -0.4018808901309967, -0.22650201618671417, -0.3466719388961792, -0.5704298615455627, 0.5488253831863403, 0.7320136427879333, -0.4948602318763733, -0.764754593372345, -0.2980794906616211, 0.17195414006710052...
model-only columns: null

id: Nexdata/Cantonese_Script_Data
author: Nexdata | last_modified: 2023-11-10T07:11:31Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-10T07:11:31Z | createdAt: 2023-11-08T10:55:02.000Z | created: 2023-11-08T10:55:02
card:
  ---
  # Dataset Card for Nexdata/Cantonese_Script_Data
  ## Description
  Cantonese textual data, 82 million pieces in total, collected from Cantonese script text. The dataset can be used for natural language understanding, knowledge base construction, and other tasks.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/188?source=Huggingface
  # Specifications
  - Data content: 82 million Cantonese script texts
  - Data size: 82 million Cantonese script texts
  - Collecting period: the year 2015
  - Storage format: txt
  - Language: Cantonese
  # Licensing Information
  Commercial License
embedding:
[ -0.2546437978744507, -0.8996290564537048, 0.09971364587545395, 0.7376828193664551, -0.20647494494915009, -0.3699195086956024, 0.1490148901939392, -0.37430378794670105, 0.5158123970031738, 1.0221343040466309, -0.47692322731018066, -0.7941981554031372, -0.5513010025024414, 0.2514381408691406...
model-only columns: null

id: Nexdata/English_Intention_Annotation_Data_in_Interactive_Scenes
author: Nexdata | last_modified: 2023-11-10T07:09:51Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:text-classification", "language:en", "region:us" ] | lastModified: 2023-11-10T07:09:51Z | createdAt: 2023-11-08T10:56:30.000Z | created: 2023-11-08T10:56:30
card:
  --- task_categories: - text-classification language: - en ---
  # Dataset Card for Nexdata/English_Intention_Annotation_Data_in_Interactive_Scenes
  ## Description
  84,516 sentences of English intention annotation data in interactive scenes, annotated with intent classes, including slot and slot-value information. The intent fields include music, weather, date, schedule, home equipment, etc. It is applied to intent recognition research and related fields.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1154?source=Huggingface
  # Specifications
  - Content: intent-type single-sentence annotation data
  - Content label: sentences are manually written with corresponding intentions and annotated for intent
  - Storage format: Excel
  - Language: English
  - Data size: 84,516 sentences
  - Application scenario: intention understanding in speech interaction
  # Licensing Information
  Commercial License
embedding:
[ -0.45407724380493164, -0.8646690845489502, 0.4575542211532593, 0.6555711627006531, -0.14459481835365295, -0.7861083149909973, -0.10659831017255783, -0.4676748812198639, 0.5130786299705505, 0.6416625380516052, -0.841221272945404, -0.78299880027771, -0.2514168620109558, -0.03585488721728325,...
model-only columns: null
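
The card above lists Excel as the storage format; a minimal inspection sketch with pandas (the filename and column layout below are assumptions, since neither is documented):

```python
import pandas as pd

# Hypothetical filename; expect sentence, intent, and slot/slot-value columns.
df = pd.read_excel("intent_annotations.xlsx")
print(df.columns.tolist())
print(df.head())
```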

id: Nexdata/Chinese-Japanese_Parallel_Corpus_Data
author: Nexdata | last_modified: 2023-11-10T07:23:13Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:translation", "language:ja", "language:zh", "region:us" ] | lastModified: 2023-11-10T07:23:13Z | createdAt: 2023-11-08T11:00:00.000Z | created: 2023-11-08T11:00:00
card:
  --- task_categories: - translation language: - ja - zh ---
  # Dataset Card for Nexdata/Chinese-Japanese_Parallel_Corpus_Data
  ## Description
  9.83 million pairs of sentences of Chinese-Japanese parallel corpus data, stored in txt format. It covers multiple fields including general, IT, news, patent, and international engine. Data desensitization and quality checking have been done. It can be used as a basic corpus for text data analysis in fields such as machine translation.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1069?source=Huggingface
  # Specifications
  - Format: TXT
  - Data content: Chinese-Japanese parallel corpus
  - Data size: 9.83 million pairs of Chinese-Japanese parallel corpus data
  - Language: Chinese, Japanese
  - Applications: machine translation
  - Accuracy rate: 90%
  # Licensing Information
  Commercial License
embedding:
[ -0.36474961042404175, -0.7301474809646606, 0.2772642970085144, 0.5708712935447693, -0.2804536521434784, -0.09303124994039536, -0.28300073742866516, -0.27963787317276, 0.4854159653186798, 0.5929646492004395, -0.33080241084098816, -0.841505765914917, -0.5573911666870117, 0.3945731818675995, ...
model-only columns: null

id: Nexdata/Chinese-Korean_Parallel_Corpus_Data
author: Nexdata | last_modified: 2023-11-10T07:05:57Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "task_categories:translation", "language:ko", "region:us" ] | lastModified: 2023-11-10T07:05:57Z | createdAt: 2023-11-08T11:01:33.000Z | created: 2023-11-08T11:01:33
card:
  --- task_categories: - translation language: - ko ---
  # Dataset Card for Nexdata/Chinese-Korean_Parallel_Corpus_Data
  ## Description
  12,820,000 sets of Chinese-Korean parallel translation corpus, stored in txt files. It covers many fields including spoken language, travel, news, and finance. Data cleaning, desensitization, and quality inspection have been carried out. It can be used as a basic corpus database for text data analysis as well as in machine translation.
  For more details, please refer to the link: https://www.nexdata.ai/datasets/1200?source=Huggingface
  # Specifications
  - Storage format: TXT
  - Data content: Chinese-Korean parallel corpus data
  - Data size: 12.82 million pairs of Chinese-Korean parallel corpus data; the Chinese sentences contain 25.7 characters on average
  - Language: Chinese, Korean
  - Accuracy rate: 90%
  - Application scenario: machine translation
  # Licensing Information
  Commercial License
embedding:
[ -0.3457546830177307, -0.4416629374027252, 0.35630443692207336, 0.6355734467506409, -0.20918498933315277, -0.049037933349609375, -0.34900641441345215, -0.36398574709892273, 0.4218003749847412, 0.5628919005393982, -0.290702223777771, -0.907639741897583, -0.47596150636672974, 0.39487645030021...
model-only columns: null
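
The parallel-corpus records above specify TXT storage but not how sentence pairs are aligned; a minimal reading sketch, assuming two line-aligned files, one per language (an assumption):

```python
def read_parallel(src_path, tgt_path):
    """Yield (source, target) sentence pairs from line-aligned files."""
    with open(src_path, encoding="utf-8") as src, \
         open(tgt_path, encoding="utf-8") as tgt:
        for s, t in zip(src, tgt):
            yield s.strip(), t.strip()
```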

id: faizalnf1800/web_novel_cover_image
author: faizalnf1800 | last_modified: 2023-11-08T11:07:09Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T11:07:09Z | createdAt: 2023-11-08T11:07:09.000Z | created: 2023-11-08T11:07:09
card: Entry not found
embedding:
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
model-only columns: null

id: MyRebRIc/tomas
author: MyRebRIc | last_modified: 2023-11-08T11:11:18Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:openrail", "region:us" ] | lastModified: 2023-11-08T11:11:18Z | createdAt: 2023-11-08T11:07:53.000Z | created: 2023-11-08T11:07:53
card: --- license: openrail ---
embedding:
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
model-only columns: null

id: Rezuwan/Parrot60_Dataset
author: Rezuwan | last_modified: 2023-11-08T14:20:27Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:apache-2.0", "region:us" ] | lastModified: 2023-11-08T14:20:27Z | createdAt: 2023-11-08T11:11:18.000Z | created: 2023-11-08T11:11:18
card: --- license: apache-2.0 ---
embedding:
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
model-only columns: null

id: gttsehu/basque_parlament_1
author: gttsehu | last_modified: 2023-11-08T11:32:29Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "license:cc0-1.0", "region:us" ] | lastModified: 2023-11-08T11:32:29Z | createdAt: 2023-11-08T11:32:29.000Z | created: 2023-11-08T11:32:29
card: --- license: cc0-1.0 ---
embedding:
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
model-only columns: null

id: sushi009/hf_test_repo
author: sushi009 | last_modified: 2023-11-08T11:46:24Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T11:46:24Z | createdAt: 2023-11-08T11:46:24.000Z | created: 2023-11-08T11:46:24
card: Entry not found
embedding:
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
model-only columns: null

id: Martin7412/Azure
author: Martin7412 | last_modified: 2023-11-08T11:58:35Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T11:58:35Z | createdAt: 2023-11-08T11:57:08.000Z | created: 2023-11-08T11:57:08
card:
  Types of Resource Locks
  - Shared vs. exclusive locks: in Azure, shared (read-only) locks allow authorized users to view a resource but prevent modifications ([An Azure Resource Can Have Multiple Delete Locks](https://dumpsmedia.com/education/an-azure-resource-can-have-multiple-delete-locks/)); exclusive locks restrict all types of operations on the resource, ensuring complete protection against any changes.
  - Read-only locks: commonly used to prevent accidental alterations; they enable viewing but prohibit modifications to the resource.
  - Delete locks: play a critical role in preventing accidental deletions; Azure resources support multiple delete locks, ensuring an additional layer of security.
  - Importance of delete locks: delete locks offer a failsafe against unintentional removal of critical resources; having multiple delete locks adds further assurance and control, minimizing the risk of inadvertent data loss or service disruptions.
  - Implementing multiple delete locks: Azure allows the implementation of multiple delete locks on a single resource, providing enhanced protection; users can set up multiple delete locks to fortify resource safety.
  More details: https://dumpsmedia.com/education/an-azure-resource-can-have-multiple-delete-locks/
embedding:
[ -0.3607025444507599, -0.41062307357788086, 0.24396905303001404, 0.1653117537498474, -0.22003969550132751, 0.6521841883659363, 0.1921481192111969, -0.5665655136108398, 0.09307257831096649, 0.6056234836578369, -0.5633797645568848, -0.31680625677108765, -0.528927206993103, 0.21302486956119537...
model-only columns: null

id: open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public
author: open-llm-leaderboard | last_modified: 2023-11-08T12:01:52Z | downloads: 0 | likes: 0 | paperswithcode_id: null
tags: [ "region:us" ] | lastModified: 2023-11-08T12:01:52Z | createdAt: 2023-11-08T12:00:48.000Z | created: 2023-11-08T12:00:48
card:
--- pretty_name: Evaluation run of Weyaxi/OpenOrca-Nebula-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Weyaxi/OpenOrca-Nebula-7B](https://huggingface.co/Weyaxi/OpenOrca-Nebula-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-11-08T11:58:02.317093](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public/blob/main/results_2023-11-08T11-58-02.317093.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5781344309327976,\n\ \ \"acc_stderr\": 0.03435050067075012,\n \"acc_norm\": 0.581933273042423,\n\ \ \"acc_norm_stderr\": 0.03433158518593753,\n \"mc1\": 0.3684210526315789,\n\ \ \"mc1_stderr\": 0.016886551261046046,\n \"mc2\": 0.531795789007015,\n\ \ \"mc2_stderr\": 0.015539765760842488\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.552901023890785,\n \"acc_stderr\": 0.014529380160526848,\n\ \ \"acc_norm\": 0.5870307167235495,\n \"acc_norm_stderr\": 0.014388344935398326\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6283608842859988,\n\ \ \"acc_stderr\": 0.004822550638450896,\n \"acc_norm\": 0.8183628759211312,\n\ \ \"acc_norm_stderr\": 0.0038475722596364257\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n\ \ \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n\ \ \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.03988903703336284,\n\ \ \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.03988903703336284\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n\ \ \"acc_stderr\": 0.05021167315686779,\n \"acc_norm\": 0.52,\n \ \ \"acc_norm_stderr\": 0.05021167315686779\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.5773584905660377,\n \"acc_stderr\": 0.03040233144576954,\n\ \ \"acc_norm\": 0.5773584905660377,\n \"acc_norm_stderr\": 0.03040233144576954\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6597222222222222,\n\ \ \"acc_stderr\": 0.039621355734862175,\n \"acc_norm\": 0.6597222222222222,\n\ \ \"acc_norm_stderr\": 0.039621355734862175\n },\n 
\"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \ \ \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"\ acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\"\ : 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n\ \ \"acc_stderr\": 0.03714325906302065,\n \"acc_norm\": 0.6127167630057804,\n\ \ \"acc_norm_stderr\": 0.03714325906302065\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.046550104113196177,\n\ \ \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.046550104113196177\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n\ \ \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.4595744680851064,\n \"acc_stderr\": 0.03257901482099835,\n\ \ \"acc_norm\": 0.4595744680851064,\n \"acc_norm_stderr\": 0.03257901482099835\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n\ \ \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n\ \ \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n\ \ \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.41798941798941797,\n \"acc_stderr\": 0.02540255550326091,\n \"\ acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.02540255550326091\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n\ \ \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n\ \ \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \ \ \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7129032258064516,\n\ \ \"acc_stderr\": 0.025736542745594528,\n \"acc_norm\": 0.7129032258064516,\n\ \ \"acc_norm_stderr\": 0.025736542745594528\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.4729064039408867,\n \"acc_stderr\": 0.03512819077876106,\n\ \ \"acc_norm\": 0.4729064039408867,\n \"acc_norm_stderr\": 0.03512819077876106\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.6,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\"\ : 0.6,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7151515151515152,\n \"acc_stderr\": 0.0352439084451178,\n\ \ \"acc_norm\": 0.7151515151515152,\n \"acc_norm_stderr\": 0.0352439084451178\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.7626262626262627,\n \"acc_stderr\": 0.0303137105381989,\n \"acc_norm\"\ : 0.7626262626262627,\n \"acc_norm_stderr\": 0.0303137105381989\n },\n\ \ 
\"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \ \ \"acc\": 0.7823834196891192,\n \"acc_stderr\": 0.029778663037752954,\n\ \ \"acc_norm\": 0.7823834196891192,\n \"acc_norm_stderr\": 0.029778663037752954\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5769230769230769,\n \"acc_stderr\": 0.025049197876042345,\n\ \ \"acc_norm\": 0.5769230769230769,\n \"acc_norm_stderr\": 0.025049197876042345\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3037037037037037,\n \"acc_stderr\": 0.028037929969114986,\n \ \ \"acc_norm\": 0.3037037037037037,\n \"acc_norm_stderr\": 0.028037929969114986\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5630252100840336,\n \"acc_stderr\": 0.032219436365661956,\n\ \ \"acc_norm\": 0.5630252100840336,\n \"acc_norm_stderr\": 0.032219436365661956\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.37748344370860926,\n \"acc_stderr\": 0.0395802723112157,\n \"\ acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.0395802723112157\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7798165137614679,\n \"acc_stderr\": 0.017765978652327562,\n \"\ acc_norm\": 0.7798165137614679,\n \"acc_norm_stderr\": 0.017765978652327562\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.39814814814814814,\n \"acc_stderr\": 0.033384734032074016,\n \"\ acc_norm\": 0.39814814814814814,\n \"acc_norm_stderr\": 0.033384734032074016\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7450980392156863,\n \"acc_stderr\": 0.03058759135160425,\n \"\ acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.03058759135160425\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676166,\n \ \ \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676166\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6412556053811659,\n\ \ \"acc_stderr\": 0.03219079200419995,\n \"acc_norm\": 0.6412556053811659,\n\ \ \"acc_norm_stderr\": 0.03219079200419995\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.04010358942462203,\n\ \ \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.04010358942462203\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514511,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514511\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6481481481481481,\n\ \ \"acc_stderr\": 0.046166311118017125,\n \"acc_norm\": 0.6481481481481481,\n\ \ \"acc_norm_stderr\": 0.046166311118017125\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6687116564417178,\n \"acc_stderr\": 0.03697983910025588,\n\ \ \"acc_norm\": 0.6687116564417178,\n \"acc_norm_stderr\": 0.03697983910025588\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n\ \ \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \ \ \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n\ \ \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8162393162393162,\n\ \ \"acc_stderr\": 
0.025372139671722933,\n \"acc_norm\": 0.8162393162393162,\n\ \ \"acc_norm_stderr\": 0.025372139671722933\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542129,\n \ \ \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542129\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7739463601532567,\n\ \ \"acc_stderr\": 0.014957458504335833,\n \"acc_norm\": 0.7739463601532567,\n\ \ \"acc_norm_stderr\": 0.014957458504335833\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.615606936416185,\n \"acc_stderr\": 0.026189666966272035,\n\ \ \"acc_norm\": 0.615606936416185,\n \"acc_norm_stderr\": 0.026189666966272035\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3553072625698324,\n\ \ \"acc_stderr\": 0.01600698993480319,\n \"acc_norm\": 0.3553072625698324,\n\ \ \"acc_norm_stderr\": 0.01600698993480319\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.630718954248366,\n \"acc_stderr\": 0.02763417668960266,\n\ \ \"acc_norm\": 0.630718954248366,\n \"acc_norm_stderr\": 0.02763417668960266\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6463022508038585,\n\ \ \"acc_stderr\": 0.02715520810320086,\n \"acc_norm\": 0.6463022508038585,\n\ \ \"acc_norm_stderr\": 0.02715520810320086\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.02604176620271716,\n\ \ \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.02604176620271716\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.42907801418439717,\n \"acc_stderr\": 0.02952591430255856,\n \ \ \"acc_norm\": 0.42907801418439717,\n \"acc_norm_stderr\": 0.02952591430255856\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4302477183833116,\n\ \ \"acc_stderr\": 0.012645361435115233,\n \"acc_norm\": 0.4302477183833116,\n\ \ \"acc_norm_stderr\": 0.012645361435115233\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.5220588235294118,\n \"acc_stderr\": 0.03034326422421352,\n\ \ \"acc_norm\": 0.5220588235294118,\n \"acc_norm_stderr\": 0.03034326422421352\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.5980392156862745,\n \"acc_stderr\": 0.01983517648437539,\n \ \ \"acc_norm\": 0.5980392156862745,\n \"acc_norm_stderr\": 0.01983517648437539\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n\ \ \"acc_stderr\": 0.04582004841505417,\n \"acc_norm\": 0.6454545454545455,\n\ \ \"acc_norm_stderr\": 0.04582004841505417\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5265306122448979,\n \"acc_stderr\": 0.03196412734523272,\n\ \ \"acc_norm\": 0.5265306122448979,\n \"acc_norm_stderr\": 0.03196412734523272\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7711442786069652,\n\ \ \"acc_stderr\": 0.029705284056772432,\n \"acc_norm\": 0.7711442786069652,\n\ \ \"acc_norm_stderr\": 0.029705284056772432\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.79,\n \"acc_stderr\": 0.040936018074033256,\n \ \ \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.040936018074033256\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n\ \ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.4578313253012048,\n\ \ \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.032744852119469564,\n\ \ 
\"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.032744852119469564\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3684210526315789,\n\ \ \"mc1_stderr\": 0.016886551261046046,\n \"mc2\": 0.531795789007015,\n\ \ \"mc2_stderr\": 0.015539765760842488\n }\n}\n```" repo_url: https://huggingface.co/Weyaxi/OpenOrca-Nebula-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|arc:challenge|25_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hellaswag|10_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T11-58-02.317093.parquet' - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T11-58-02.317093.parquet' - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T11-58-02.317093.parquet' - 
'**/details_harness|hendrycksTest-philosophy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T11-58-02.317093.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T11-58-02.317093.parquet' - config_name: 
harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - 
'**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T11-58-02.317093.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T11-58-02.317093.parquet'
- config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_08T11_58_02.317093 path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T11-58-02.317093.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T11-58-02.317093.parquet'
- config_name: results data_files: - split: 2023_11_08T11_58_02.317093 path: - results_2023-11-08T11-58-02.317093.parquet - split: latest path: - results_2023-11-08T11-58-02.317093.parquet
---
# Dataset Card for Evaluation run of Weyaxi/OpenOrca-Nebula-7B
## Dataset Description
- **Homepage:**
- **Repository:** https://huggingface.co/Weyaxi/OpenOrca-Nebula-7B
- **Paper:**
- **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard
- **Point of Contact:** clementine@hf.co
### Dataset Summary
Dataset automatically created during the evaluation run of model [Weyaxi/OpenOrca-Nebula-7B](https://huggingface.co/Weyaxi/OpenOrca-Nebula-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).
The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks.
The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.
An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public",
	"harness_truthfulqa_mc_0",
	split="train")
```

## Latest results

These are the [latest results from run 2023-11-08T11:58:02.317093](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public/blob/main/results_2023-11-08T11-58-02.317093.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.5781344309327976, "acc_stderr": 0.03435050067075012, "acc_norm": 0.581933273042423, "acc_norm_stderr": 0.03433158518593753, "mc1": 0.3684210526315789, "mc1_stderr": 0.016886551261046046, "mc2": 0.531795789007015, "mc2_stderr": 0.015539765760842488 }, "harness|arc:challenge|25": { "acc": 0.552901023890785, "acc_stderr": 0.014529380160526848, "acc_norm": 0.5870307167235495, "acc_norm_stderr": 0.014388344935398326 }, "harness|hellaswag|10": { "acc": 0.6283608842859988, "acc_stderr": 0.004822550638450896, "acc_norm": 0.8183628759211312, "acc_norm_stderr": 0.0038475722596364257 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.03988903703336284, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.03988903703336284 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.05021167315686779, "acc_norm": 0.52, "acc_norm_stderr": 0.05021167315686779 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5773584905660377, "acc_stderr": 0.03040233144576954, "acc_norm": 0.5773584905660377, "acc_norm_stderr": 0.03040233144576954 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6597222222222222, "acc_stderr": 0.039621355734862175, "acc_norm": 0.6597222222222222, "acc_norm_stderr": 0.039621355734862175 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.03714325906302065, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.03714325906302065 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4595744680851064, "acc_stderr": 0.03257901482099835, "acc_norm": 0.4595744680851064, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316,
"acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.02540255550326091, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.02540255550326091 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7129032258064516, "acc_stderr": 0.025736542745594528, "acc_norm": 0.7129032258064516, "acc_norm_stderr": 0.025736542745594528 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4729064039408867, "acc_stderr": 0.03512819077876106, "acc_norm": 0.4729064039408867, "acc_norm_stderr": 0.03512819077876106 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.6, "acc_stderr": 0.049236596391733084, "acc_norm": 0.6, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7151515151515152, "acc_stderr": 0.0352439084451178, "acc_norm": 0.7151515151515152, "acc_norm_stderr": 0.0352439084451178 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7626262626262627, "acc_stderr": 0.0303137105381989, "acc_norm": 0.7626262626262627, "acc_norm_stderr": 0.0303137105381989 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7823834196891192, "acc_stderr": 0.029778663037752954, "acc_norm": 0.7823834196891192, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5769230769230769, "acc_stderr": 0.025049197876042345, "acc_norm": 0.5769230769230769, "acc_norm_stderr": 0.025049197876042345 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3037037037037037, "acc_stderr": 0.028037929969114986, "acc_norm": 0.3037037037037037, "acc_norm_stderr": 0.028037929969114986 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5630252100840336, "acc_stderr": 0.032219436365661956, "acc_norm": 0.5630252100840336, "acc_norm_stderr": 0.032219436365661956 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.0395802723112157, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.0395802723112157 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7798165137614679, "acc_stderr": 0.017765978652327562, "acc_norm": 0.7798165137614679, "acc_norm_stderr": 0.017765978652327562 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.39814814814814814, "acc_stderr": 0.033384734032074016, "acc_norm": 0.39814814814814814, "acc_norm_stderr": 0.033384734032074016 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7450980392156863, "acc_stderr": 0.03058759135160425, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.03058759135160425 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.026750826994676166, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.026750826994676166 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6412556053811659, "acc_stderr": 
0.03219079200419995, "acc_norm": 0.6412556053811659, "acc_norm_stderr": 0.03219079200419995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.04010358942462203, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.04010358942462203 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514511, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514511 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6481481481481481, "acc_stderr": 0.046166311118017125, "acc_norm": 0.6481481481481481, "acc_norm_stderr": 0.046166311118017125 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6687116564417178, "acc_stderr": 0.03697983910025588, "acc_norm": 0.6687116564417178, "acc_norm_stderr": 0.03697983910025588 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8162393162393162, "acc_stderr": 0.025372139671722933, "acc_norm": 0.8162393162393162, "acc_norm_stderr": 0.025372139671722933 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542129, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7739463601532567, "acc_stderr": 0.014957458504335833, "acc_norm": 0.7739463601532567, "acc_norm_stderr": 0.014957458504335833 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.615606936416185, "acc_stderr": 0.026189666966272035, "acc_norm": 0.615606936416185, "acc_norm_stderr": 0.026189666966272035 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3553072625698324, "acc_stderr": 0.01600698993480319, "acc_norm": 0.3553072625698324, "acc_norm_stderr": 0.01600698993480319 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.630718954248366, "acc_stderr": 0.02763417668960266, "acc_norm": 0.630718954248366, "acc_norm_stderr": 0.02763417668960266 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6463022508038585, "acc_stderr": 0.02715520810320086, "acc_norm": 0.6463022508038585, "acc_norm_stderr": 0.02715520810320086 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6759259259259259, "acc_stderr": 0.02604176620271716, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.02604176620271716 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.42907801418439717, "acc_stderr": 0.02952591430255856, "acc_norm": 0.42907801418439717, "acc_norm_stderr": 0.02952591430255856 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4302477183833116, "acc_stderr": 0.012645361435115233, "acc_norm": 0.4302477183833116, "acc_norm_stderr": 0.012645361435115233 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5220588235294118, "acc_stderr": 0.03034326422421352, "acc_norm": 0.5220588235294118, "acc_norm_stderr": 0.03034326422421352 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5980392156862745, "acc_stderr": 0.01983517648437539, "acc_norm": 0.5980392156862745, "acc_norm_stderr": 0.01983517648437539 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.04582004841505417, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.04582004841505417 }, "harness|hendrycksTest-security_studies|5": { "acc": 
0.5265306122448979, "acc_stderr": 0.03196412734523272, "acc_norm": 0.5265306122448979, "acc_norm_stderr": 0.03196412734523272 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7711442786069652, "acc_stderr": 0.029705284056772432, "acc_norm": 0.7711442786069652, "acc_norm_stderr": 0.029705284056772432 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.79, "acc_stderr": 0.040936018074033256, "acc_norm": 0.79, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.0387862677100236, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.032744852119469564, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.032744852119469564 }, "harness|truthfulqa:mc|0": { "mc1": 0.3684210526315789, "mc1_stderr": 0.016886551261046046, "mc2": 0.531795789007015, "mc2_stderr": 0.015539765760842488 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
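The snippet above loads one per-task configuration; the aggregated "results" configuration described in the Dataset Summary can be loaded the same way. A minimal sketch (it assumes the `datasets` library is installed; the config and split names come from the YAML header of this card):

```python
from datasets import load_dataset

# The aggregated metrics of this evaluation run live in the "results" config;
# its "latest" split points at results_2023-11-08T11-58-02.317093.parquet.
results = load_dataset(
    "open-llm-leaderboard/details_Weyaxi__OpenOrca-Nebula-7B_public",
    "results",
    split="latest",
)
print(results[0])  # a single row holding the run's aggregated metrics
```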
[ -0.7301012873649597, -0.8451433181762695, 0.2815612554550171, 0.19767087697982788, -0.18912255764007568, -0.0749097615480423, 0.031257204711437225, -0.22665652632713318, 0.5999518632888794, -0.03262026607990265, -0.45544466376304626, -0.7243497371673584, -0.43413907289505005, 0.22836242616...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public
open-llm-leaderboard
2023-11-08T12:04:33Z
0
0
null
[ "region:us" ]
2023-11-08T12:04:33Z
2023-11-08T12:03:34.000Z
2023-11-08T12:03:34
--- pretty_name: Evaluation run of pankajmathur/Mistral-7B-model_45k6e2e4 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [pankajmathur/Mistral-7B-model_45k6e2e4](https://huggingface.co/pankajmathur/Mistral-7B-model_45k6e2e4)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configurations, each one corresponding to one of the\ \ evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-11-08T12:00:55.074514](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public/blob/main/results_2023-11-08T12-00-55.074514.json) (note\ \ that there might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n    \"all\": {\n        \"acc\": 0.23184197178254598,\n\ \        \"acc_stderr\": 0.030693965962788314,\n        \"acc_norm\": 0.23241967500614574,\n\ \        \"acc_norm_stderr\": 0.030706854185546608,\n        \"mc1\": 0.24357405140758873,\n\ \        \"mc1_stderr\": 0.01502635482491078,\n        \"mc2\": 0.5084657838244592,\n\ \        \"mc2_stderr\": 0.016201328114036084\n    },\n    \"harness|arc:challenge|25\"\ : {\n        \"acc\": 0.2022184300341297,\n        \"acc_stderr\": 0.011737454431872104,\n\ \        \"acc_norm\": 0.2431740614334471,\n        \"acc_norm_stderr\": 0.01253655414458709\n\ \    },\n    \"harness|hellaswag|10\": {\n        \"acc\": 0.2577175861382195,\n\ \        \"acc_stderr\": 0.004364838000335622,\n        \"acc_norm\": 0.2508464449312886,\n\ \        \"acc_norm_stderr\": 0.0043261434303600976\n    },\n    \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n        \"acc\": 0.17,\n        \"acc_stderr\": 0.0377525168068637,\n    \ \    \"acc_norm\": 0.17,\n        \"acc_norm_stderr\": 0.0377525168068637\n    },\n\ \    \"harness|hendrycksTest-anatomy|5\": {\n        \"acc\": 0.18518518518518517,\n\ \        \"acc_stderr\": 0.03355677216313142,\n        \"acc_norm\": 0.18518518518518517,\n\ \        \"acc_norm_stderr\": 0.03355677216313142\n    },\n    \"harness|hendrycksTest-astronomy|5\"\ : {\n        \"acc\": 0.17763157894736842,\n        \"acc_stderr\": 0.031103182383123398,\n\ \        \"acc_norm\": 0.17763157894736842,\n        \"acc_norm_stderr\": 0.031103182383123398\n\ \    },\n    \"harness|hendrycksTest-business_ethics|5\": {\n        \"acc\": 0.3,\n\ \        \"acc_stderr\": 0.046056618647183814,\n        \"acc_norm\": 0.3,\n   \ \     \"acc_norm_stderr\": 0.046056618647183814\n    },\n    \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n        \"acc\": 0.21509433962264152,\n        \"acc_stderr\": 0.02528839450289137,\n\ \        \"acc_norm\": 0.21509433962264152,\n        \"acc_norm_stderr\": 0.02528839450289137\n\ \    },\n    \"harness|hendrycksTest-college_biology|5\": {\n        \"acc\": 0.2569444444444444,\n\ \        \"acc_stderr\": 0.03653946969442099,\n        \"acc_norm\": 
0.2569444444444444,\n\ \ \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \ \ \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n\ \ \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653695,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653695\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.20809248554913296,\n\ \ \"acc_stderr\": 0.030952890217749874,\n \"acc_norm\": 0.20809248554913296,\n\ \ \"acc_norm_stderr\": 0.030952890217749874\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n\ \ \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n\ \ \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.028809989854102973,\n\ \ \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.028809989854102973\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n\ \ \"acc_stderr\": 0.039994238792813365,\n \"acc_norm\": 0.23684210526315788,\n\ \ \"acc_norm_stderr\": 0.039994238792813365\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n\ \ \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.24867724867724866,\n \"acc_stderr\": 0.022261817692400175,\n \"\ acc_norm\": 0.24867724867724866,\n \"acc_norm_stderr\": 0.022261817692400175\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n\ \ \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n\ \ \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536934,\n \ \ \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536934\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.1774193548387097,\n \"acc_stderr\": 0.02173254068932927,\n \"\ acc_norm\": 0.1774193548387097,\n \"acc_norm_stderr\": 0.02173254068932927\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.15270935960591134,\n \"acc_stderr\": 0.02530890453938062,\n \"\ acc_norm\": 0.15270935960591134,\n \"acc_norm_stderr\": 0.02530890453938062\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\"\ : 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.17676767676767677,\n \"acc_stderr\": 0.027178752639044915,\n \"\ acc_norm\": 0.17676767676767677,\n \"acc_norm_stderr\": 
0.027178752639044915\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.19689119170984457,\n \"acc_stderr\": 0.028697873971860664,\n\ \ \"acc_norm\": 0.19689119170984457,\n \"acc_norm_stderr\": 0.028697873971860664\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.20512820512820512,\n \"acc_stderr\": 0.02047323317355198,\n\ \ \"acc_norm\": 0.20512820512820512,\n \"acc_norm_stderr\": 0.02047323317355198\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2074074074074074,\n \"acc_stderr\": 0.024720713193952165,\n \ \ \"acc_norm\": 0.2074074074074074,\n \"acc_norm_stderr\": 0.024720713193952165\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.21008403361344538,\n \"acc_stderr\": 0.026461398717471874,\n\ \ \"acc_norm\": 0.21008403361344538,\n \"acc_norm_stderr\": 0.026461398717471874\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.18543046357615894,\n \"acc_stderr\": 0.03173284384294285,\n \"\ acc_norm\": 0.18543046357615894,\n \"acc_norm_stderr\": 0.03173284384294285\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.1926605504587156,\n \"acc_stderr\": 0.016909276884936094,\n \"\ acc_norm\": 0.1926605504587156,\n \"acc_norm_stderr\": 0.016909276884936094\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.1527777777777778,\n \"acc_stderr\": 0.024536326026134224,\n \"\ acc_norm\": 0.1527777777777778,\n \"acc_norm_stderr\": 0.024536326026134224\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n\ \ \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\"\ : {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293426,\n\ \ \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293426\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.31390134529147984,\n\ \ \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.31390134529147984,\n\ \ \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.2595419847328244,\n \"acc_stderr\": 0.03844876139785271,\n\ \ \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"\ acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n\ \ \"acc_stderr\": 0.042365112580946336,\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.22085889570552147,\n \"acc_stderr\": 0.032591773927421776,\n\ \ \"acc_norm\": 0.22085889570552147,\n \"acc_norm_stderr\": 0.032591773927421776\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n\ \ \"acc_stderr\": 0.04464285714285713,\n \"acc_norm\": 0.33035714285714285,\n\ \ \"acc_norm_stderr\": 0.04464285714285713\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.17475728155339806,\n \"acc_stderr\": 0.037601780060266224,\n\ \ \"acc_norm\": 0.17475728155339806,\n \"acc_norm_stderr\": 0.037601780060266224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.2905982905982906,\n\ \ \"acc_stderr\": 0.02974504857267404,\n \"acc_norm\": 0.2905982905982906,\n\ \ \"acc_norm_stderr\": 0.02974504857267404\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.23754789272030652,\n\ \ \"acc_stderr\": 0.015218733046150193,\n \"acc_norm\": 0.23754789272030652,\n\ \ \"acc_norm_stderr\": 0.015218733046150193\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n\ \ \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n\ \ \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n\ \ \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.023929155517351284,\n\ \ \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.023929155517351284\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.1864951768488746,\n\ \ \"acc_stderr\": 0.02212243977248077,\n \"acc_norm\": 0.1864951768488746,\n\ \ \"acc_norm_stderr\": 0.02212243977248077\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n\ \ \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432414,\n \ \ \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432414\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2457627118644068,\n\ \ \"acc_stderr\": 0.010996156635142692,\n \"acc_norm\": 0.2457627118644068,\n\ \ \"acc_norm_stderr\": 0.010996156635142692\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.023529242185193106,\n\ \ \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.023529242185193106\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.25,\n \"acc_stderr\": 0.01751781884501444,\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.01751781884501444\n },\n \"harness|hendrycksTest-public_relations|5\"\ : {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n\ \ \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n\ \ },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.18775510204081633,\n\ \ \"acc_stderr\": 0.02500025603954621,\n \"acc_norm\": 0.18775510204081633,\n\ \ \"acc_norm_stderr\": 0.02500025603954621\n },\n \"harness|hendrycksTest-sociology|5\"\ : {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.03036049015401465,\n\ \ \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.03036049015401465\n\ \ },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\":\ \ 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n\ \ \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-virology|5\"\ : {\n \"acc\": 0.28313253012048195,\n \"acc_stderr\": 0.03507295431370518,\n\ \ \"acc_norm\": 0.28313253012048195,\n \"acc_norm_stderr\": 0.03507295431370518\n\ \ },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3216374269005848,\n\ \ 
\"acc_stderr\": 0.03582529442573122,\n \"acc_norm\": 0.3216374269005848,\n\ \ \"acc_norm_stderr\": 0.03582529442573122\n },\n \"harness|truthfulqa:mc|0\"\ : {\n \"mc1\": 0.24357405140758873,\n \"mc1_stderr\": 0.01502635482491078,\n\ \ \"mc2\": 0.5084657838244592,\n \"mc2_stderr\": 0.016201328114036084\n\ \ }\n}\n```" repo_url: https://huggingface.co/pankajmathur/Mistral-7B-model_45k6e2e4 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|arc:challenge|25_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hellaswag|10_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T12-00-55.074514.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T12-00-55.074514.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T12-00-55.074514.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T12-00-55.074514.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T12-00-55.074514.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T12-00-55.074514.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_08T12_00_55.074514 path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T12-00-55.074514.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T12-00-55.074514.parquet' - config_name: results data_files: - split: 2023_11_08T12_00_55.074514 path: - results_2023-11-08T12-00-55.074514.parquet - split: latest path: - results_2023-11-08T12-00-55.074514.parquet --- # Dataset Card for Evaluation run of pankajmathur/Mistral-7B-model_45k6e2e4 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/pankajmathur/Mistral-7B-model_45k6e2e4 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [pankajmathur/Mistral-7B-model_45k6e2e4](https://huggingface.co/pankajmathur/Mistral-7B-model_45k6e2e4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T12:00:55.074514](https://huggingface.co/datasets/open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public/blob/main/results_2023-11-08T12-00-55.074514.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.23184197178254598, "acc_stderr": 0.030693965962788314, "acc_norm": 0.23241967500614574, "acc_norm_stderr": 0.030706854185546608, "mc1": 0.24357405140758873, "mc1_stderr": 0.01502635482491078, "mc2": 0.5084657838244592, "mc2_stderr": 0.016201328114036084 }, "harness|arc:challenge|25": { "acc": 0.2022184300341297, "acc_stderr": 0.011737454431872104, "acc_norm": 0.2431740614334471, "acc_norm_stderr": 0.01253655414458709 }, "harness|hellaswag|10": { "acc": 0.2577175861382195, "acc_stderr": 0.004364838000335622, "acc_norm": 0.2508464449312886, "acc_norm_stderr": 0.0043261434303600976 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.17, "acc_stderr": 0.0377525168068637, "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.18518518518518517, "acc_stderr": 0.03355677216313142, "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123398, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123398 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.02528839450289137, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.02528839450289137 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.20809248554913296, "acc_stderr": 0.030952890217749874, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.030952890217749874 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.028809989854102973, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.028809989854102973 }, "harness|hendrycksTest-econometrics|5": 
{ "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813365, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813365 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24867724867724866, "acc_stderr": 0.022261817692400175, "acc_norm": 0.24867724867724866, "acc_norm_stderr": 0.022261817692400175 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.18, "acc_stderr": 0.038612291966536934, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536934 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1774193548387097, "acc_stderr": 0.02173254068932927, "acc_norm": 0.1774193548387097, "acc_norm_stderr": 0.02173254068932927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.15270935960591134, "acc_stderr": 0.02530890453938062, "acc_norm": 0.15270935960591134, "acc_norm_stderr": 0.02530890453938062 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.17676767676767677, "acc_stderr": 0.027178752639044915, "acc_norm": 0.17676767676767677, "acc_norm_stderr": 0.027178752639044915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.19689119170984457, "acc_stderr": 0.028697873971860664, "acc_norm": 0.19689119170984457, "acc_norm_stderr": 0.028697873971860664 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.20512820512820512, "acc_stderr": 0.02047323317355198, "acc_norm": 0.20512820512820512, "acc_norm_stderr": 0.02047323317355198 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2074074074074074, "acc_stderr": 0.024720713193952165, "acc_norm": 0.2074074074074074, "acc_norm_stderr": 0.024720713193952165 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.21008403361344538, "acc_stderr": 0.026461398717471874, "acc_norm": 0.21008403361344538, "acc_norm_stderr": 0.026461398717471874 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.18543046357615894, "acc_stderr": 0.03173284384294285, "acc_norm": 0.18543046357615894, "acc_norm_stderr": 0.03173284384294285 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.1926605504587156, "acc_stderr": 0.016909276884936094, "acc_norm": 0.1926605504587156, "acc_norm_stderr": 0.016909276884936094 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.1527777777777778, "acc_stderr": 0.024536326026134224, "acc_norm": 0.1527777777777778, "acc_norm_stderr": 0.024536326026134224 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293426, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293426 }, "harness|hendrycksTest-human_aging|5": { "acc": 
0.31390134529147984, "acc_stderr": 0.031146796482972465, "acc_norm": 0.31390134529147984, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2595419847328244, "acc_stderr": 0.03844876139785271, "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.042365112580946336, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22085889570552147, "acc_stderr": 0.032591773927421776, "acc_norm": 0.22085889570552147, "acc_norm_stderr": 0.032591773927421776 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285713, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713 }, "harness|hendrycksTest-management|5": { "acc": 0.17475728155339806, "acc_stderr": 0.037601780060266224, "acc_norm": 0.17475728155339806, "acc_norm_stderr": 0.037601780060266224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2905982905982906, "acc_stderr": 0.02974504857267404, "acc_norm": 0.2905982905982906, "acc_norm_stderr": 0.02974504857267404 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.23754789272030652, "acc_stderr": 0.015218733046150193, "acc_norm": 0.23754789272030652, "acc_norm_stderr": 0.015218733046150193 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.22549019607843138, "acc_stderr": 0.023929155517351284, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.023929155517351284 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.1864951768488746, "acc_stderr": 0.02212243977248077, "acc_norm": 0.1864951768488746, "acc_norm_stderr": 0.02212243977248077 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432414, "acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432414 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2457627118644068, "acc_stderr": 0.010996156635142692, "acc_norm": 0.2457627118644068, "acc_norm_stderr": 0.010996156635142692 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.023529242185193106, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.023529242185193106 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.25, "acc_stderr": 0.01751781884501444, "acc_norm": 0.25, "acc_norm_stderr": 0.01751781884501444 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 
0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.18775510204081633, "acc_stderr": 0.02500025603954621, "acc_norm": 0.18775510204081633, "acc_norm_stderr": 0.02500025603954621 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.03036049015401465, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401465 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-virology|5": { "acc": 0.28313253012048195, "acc_stderr": 0.03507295431370518, "acc_norm": 0.28313253012048195, "acc_norm_stderr": 0.03507295431370518 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3216374269005848, "acc_stderr": 0.03582529442573122, "acc_norm": 0.3216374269005848, "acc_norm_stderr": 0.03582529442573122 }, "harness|truthfulqa:mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.01502635482491078, "mc2": 0.5084657838244592, "mc2_stderr": 0.016201328114036084 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
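As a usage addendum: besides the per-task configs shown in the loading example above, the aggregated metrics are exposed through the `results` config. A minimal sketch, assuming that config loads like any other (its parquet files are declared in the YAML at the top of this card):

```python
from datasets import load_dataset

# The "results" config (see the configs list above) aggregates every task's
# metrics for this run; the "latest" split always resolves to the newest run.
agg = load_dataset(
    "open-llm-leaderboard/details_pankajmathur__Mistral-7B-model_45k6e2e4_public",
    "results",
    split="latest",
)
print(agg)  # inspect the aggregated metrics table
```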
[ -0.7476470470428467, -0.8666878342628479, 0.25134357810020447, 0.20474964380264282, -0.17895224690437317, -0.08719049394130707, 0.02714966982603073, -0.22886915504932404, 0.5436928272247314, -0.015169434249401093, -0.48692864179611206, -0.6727681159973145, -0.45005548000335693, 0.255043923...
null
null
null
null
null
null
null
null
null
null
null
null
null
Praghxx/Nosred
Praghxx
2023-11-08T12:26:00Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-08T12:26:00Z
2023-11-08T12:04:13.000Z
2023-11-08T12:04:13
--- license: openrail ---
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
null
null
null
null
null
null
null
null
null
null
null
null
null
AgelessBodySystem/Ageless-Body-System-Official-Website
AgelessBodySystem
2023-11-08T12:22:42Z
0
0
null
[ "region:us" ]
2023-11-08T12:22:42Z
2023-11-08T12:22:42.000Z
2023-11-08T12:22:42
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
exactor101/amarDataSet
exactor101
2023-11-08T12:31:05Z
0
0
null
[ "region:us" ]
2023-11-08T12:31:05Z
2023-11-08T12:31:05.000Z
2023-11-08T12:31:05
Entry not found
[ -0.3227649927139282, -0.225684255361557, 0.862226128578186, 0.43461498618125916, -0.5282987952232361, 0.7012963891029358, 0.7915717363357544, 0.07618629932403564, 0.7746025919914246, 0.2563219666481018, -0.7852816581726074, -0.2257382869720459, -0.9104480743408203, 0.5715669393539429, -0...
null
null
null
null
null
null
null
null
null
null
null
null
null
Oedipe/modeleagg
Oedipe
2023-11-08T20:40:10Z
0
0
null
[ "license:unknown", "region:us" ]
2023-11-08T20:40:10Z
2023-11-08T12:45:54.000Z
2023-11-08T12:45:54
--- license: unknown ---
[ -0.12853367626667023, -0.18616794049739838, 0.6529126763343811, 0.4943627417087555, -0.19319313764572144, 0.23607443273067474, 0.36071979999542236, 0.05056338757276535, 0.5793654322624207, 0.7400138974189758, -0.6508103013038635, -0.23783987760543823, -0.710224986076355, -0.047825977206230...
null
null
null
null
null
null
null
null
null
null
null
null
null
md-nishat-008/OffMix-3L
md-nishat-008
2023-11-08T12:56:32Z
0
0
null
[ "license:cc-by-nd-4.0", "arxiv:2310.18387", "region:us" ]
2023-11-08T12:56:32Z
2023-11-08T12:55:53.000Z
2023-11-08T12:55:53
--- license: cc-by-nd-4.0 --- # OffMix-3L: A Novel Code-Mixed Dataset in Bangla-English-Hindi for Offensive Language Identification **Publication**: *The 11th International Workshop on Natural Language Processing for Social Media (SocialNLP) under AACL-2023.* **Read on [arXiv](https://arxiv.org/pdf/2310.18387.pdf)** --- ## 📝 Citation When using the OffMix-3L dataset, please cite the following: ```bibtex @article{goswami2023offmix, title={OffMix-3L: A Novel Code-Mixed Dataset in Bangla-English-Hindi for Offensive Language Identification}, author={Goswami, Dhiman and Raihan, Md Nishat and Mahmud, Antara and Anastasopoulos, Antonios and Zampieri, Marcos}, journal={arXiv preprint arXiv:2310.18387}, year={2023} } ``` --- ## 📖 Introduction Code-mixing is a well-studied linguistic phenomenon in which two or more languages are mixed in text or speech. Several datasets have been built with the goal of training computational models for code-mixing. Although it is very common to observe code-mixing across multiple languages, most available datasets contain code-mixing between only two languages. In this paper, we introduce OffMix-3L, a novel dataset for offensive language identification containing code-mixed data between three languages: Bangla, English, and Hindi. --- ## 📊 Dataset Details We introduce **OffMix-3L**, a novel three-language code-mixed test dataset with gold standard labels in Bangla-Hindi-English for the task of Offensive Language Identification, containing 1,001 instances. > We are presenting this dataset exclusively as a test set due to the unique and specialized nature of the task. Such data is very difficult to gather and requires significant expertise to access. The size of the dataset, while limiting for training purposes, offers a high-quality testing environment with gold-standard labels that can serve as a benchmark in this domain. --- ## 📈 Dataset Statistics | | **All** | **Bangla** | **English** | **Hindi** | **Other** | |-------------------|---------|------------|-------------|-----------|-----------| | Tokens | 87,190 | 31,228 | 6,690 | 14,694 | 34,578 | | Types | 18,787 | 7,714 | 1,135 | 1,413 | 8,645 | | Max. in instance | 173 | 62 | 20 | 47 | 93 | | Min. in instance | 41 | 4 | 3 | 2 | 8 | | Avg | 87.10 | 31.20 | 6.68 | 14.68 | 34.54 | | Std Dev | 20.58 | 8.60 | 3.05 | 5.74 | 10.98 | *OffMix-3L Data Card. The "Avg" row reports the average number of tokens per instance; "Std Dev" is its standard deviation.* --- ## 📉 Results | Models | F1 Score | |----------------|----------| | BanglishBERT | **0.68** | | BERT | 0.66 | | mBERT | 0.63 | | HingBERT | 0.60 | | MuRIL | 0.60 | | HateBERT | 0.60 | | fBERT | 0.58 | | roBERTa | 0.58 | | XLM-R | 0.57 | | DistilBERT | 0.57 | | GPT 3.5 Turbo | 0.57 | | IndicBERT | 0.55 | | BanglaBERT | 0.54 | | HindiBERT | 0.43 | *Weighted F-1 scores for different models, trained on synthetic data and tested on natural data (OffMix-3L).*
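For readers benchmarking against the table above, a minimal evaluation sketch. The split name, column names, label value, and the `my_model_predict` stub are illustrative assumptions; the metric (weighted F-1) is the one the caption reports:

```python
from datasets import load_dataset
from sklearn.metrics import f1_score

def my_model_predict(texts):
    # Hypothetical stand-in for a real classifier: predicts one class for all.
    return ["not_offensive"] * len(texts)

# Assumed split and column names -- verify against the repository layout.
offmix = load_dataset("md-nishat-008/OffMix-3L", split="test")
y_true = offmix["label"]
y_pred = my_model_predict(offmix["text"])

# Weighted F-1, the metric reported in the results table above.
print(f1_score(y_true, y_pred, average="weighted"))
```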
[ -0.5417752861976624, -0.6778327226638794, 0.05386975407600403, 0.43748071789741516, -0.23839877545833588, 0.2616294026374817, -0.16339440643787384, -0.44297701120376587, 0.24632184207439423, 0.1988707184791565, -0.2802610397338867, -0.7781231999397278, -0.6506693363189697, 0.09700512886047...
null
null
null
null
null
null
null
null
null
null
null
null
null
Otter-AI/ShareGPT4-V
Otter-AI
2023-11-08T23:20:22Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-08T23:20:22Z
2023-11-08T13:05:25.000Z
2023-11-08T13:05:25
--- license: mit --- We use our demo's backend user logs (filtered to make sure they do not contain any sensitive or user-related information) to query GPT4-V and share the questions/responses. For each user log, we query GPT4-V to answer the user's questions and let it self-instruct to generate 4-5 questions and answers related to the image. language: - en license: - cc0-1.0 tags: - image-text-dataset - gpt-assisted-dataset dataset_info: features: - name: instruction dtype: string - name: response dtype: string - name: image dtype: image - name: link dtype: string - name: message_id dtype: string - name: timestamp dtype: string configs: - config_name: default data_files: - split: train path: data/*_sharegpt_log.parquet
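Taken at face value, the `configs` block above globs the per-log parquet shards into a single `train` split. A minimal loading sketch under that assumption (note the metadata would need to sit inside the YAML front matter for the Hub to register it):

```python
from datasets import load_dataset

# Assumes the default config resolves data/*_sharegpt_log.parquet
# into a "train" split, as declared in the configs block above.
ds = load_dataset("Otter-AI/ShareGPT4-V", split="train")
print(ds.features)  # instruction, response, image, link, message_id, timestamp
```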
[ -0.8112224340438843, -0.48641589283943176, 0.7253723740577698, 0.02684265933930874, -0.4589921236038208, -0.16904613375663757, 0.21015632152557373, -0.5106042623519897, 0.11754128336906433, 0.5329688191413879, -0.9176336526870728, -0.7036167979240417, -0.34800177812576294, 0.06302519887685...
null
null
null
null
null
null
null
null
null
null
null
null
null
dialbird/mental_health_chatbot_dataset
dialbird
2023-11-08T13:14:28Z
0
0
null
[ "region:us" ]
2023-11-08T13:14:28Z
2023-11-08T13:14:24.000Z
2023-11-08T13:14:24
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 189421 num_examples: 172 download_size: 102271 dataset_size: 189421 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "mental_health_chatbot_dataset" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.4714198112487793, -0.4830271601676941, 0.29335451126098633, 0.17543667554855347, 0.018300555646419525, 0.13352543115615845, 0.17129790782928467, -0.06345336884260178, 0.8029800653457642, 0.5189820528030396, -1.050869345664978, -0.717017650604248, -0.7273019552230835, -0.4086387455463409...
null
null
null
null
null
null
null
null
null
null
null
null
null
hazmannaim/angkasfera_text
hazmannaim
2023-11-08T13:21:31Z
0
0
null
[ "region:us" ]
2023-11-08T13:21:31Z
2023-11-08T13:17:04.000Z
2023-11-08T13:17:04
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622264862060547, 0.43461528420448303, -0.52829909324646, 0.7012971639633179, 0.7915720343589783, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104477167129517, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
gh1407/synthetic_causal_pairs
gh1407
2023-11-08T14:23:26Z
0
0
null
[ "license:mit", "region:us" ]
2023-11-08T14:23:26Z
2023-11-08T13:37:47.000Z
2023-11-08T13:37:47
--- license: mit dataset_info: features: - name: 'Unnamed: 0.1' dtype: int64 - name: 'Unnamed: 0' dtype: int64 - name: outlet dtype: string - name: headline dtype: string - name: body dtype: string - name: political_leaning dtype: string - name: gpt_causal_graph dtype: string splits: - name: train num_bytes: 923574 num_examples: 200 download_size: 0 dataset_size: 923574 --- Causal pairs generated with ChatGPT. Training set.
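A short sketch of slicing the set by outlet leaning. The `political_leaning` column is declared in the schema above; the literal value `"left"` is an assumption about how the labels are encoded:

```python
from datasets import load_dataset

pairs = load_dataset("gh1407/synthetic_causal_pairs", split="train")

# Filter on the political_leaning column from dataset_info above;
# the value "left" is a guess at the label vocabulary.
left_leaning = pairs.filter(lambda row: row["political_leaning"] == "left")
print(len(left_leaning))
```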
[ -0.08045290410518646, -0.6618567109107971, 0.15738509595394135, 0.5471317768096924, -0.27961501479148865, -0.489841103553772, -0.115807443857193, -0.089484304189682, 0.2092052847146988, 0.3551934063434601, -0.9233213067054749, -0.06904895603656769, -0.7340059876441956, -0.2591407895088196,...
null
null
null
null
null
null
null
null
null
null
null
null
null
NandinhoVinicius/henrique
NandinhoVinicius
2023-11-08T13:47:20Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-08T13:47:20Z
2023-11-08T13:38:20.000Z
2023-11-08T13:38:20
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
NandinhoVinicius/henriqueju
NandinhoVinicius
2023-11-08T13:50:25Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-08T13:50:25Z
2023-11-08T13:49:33.000Z
2023-11-08T13:49:33
--- license: apache-2.0 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_teknium__CollectiveCognition-v1.1-Mistral-7B_public
open-llm-leaderboard
2023-11-08T13:52:33Z
0
0
null
[ "region:us" ]
2023-11-08T13:52:33Z
2023-11-08T13:51:33.000Z
2023-11-08T13:51:33
--- pretty_name: Evaluation run of teknium/CollectiveCognition-v1.1-Mistral-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [teknium/CollectiveCognition-v1.1-Mistral-7B](https://huggingface.co/teknium/CollectiveCognition-v1.1-Mistral-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 61 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_teknium__CollectiveCognition-v1.1-Mistral-7B_public\"\ ,\n\t\"harness_truthfulqa_mc_0\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\ \nThese are the [latest results from run 2023-11-08T13:48:47.550072](https://huggingface.co/datasets/open-llm-leaderboard/details_teknium__CollectiveCognition-v1.1-Mistral-7B_public/blob/main/results_2023-11-08T13-48-47.550072.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6255358463798112,\n\ \ \"acc_stderr\": 0.03339672778015502,\n \"acc_norm\": 0.6293923050064572,\n\ \ \"acc_norm_stderr\": 0.03337392753120544,\n \"mc1\": 0.40514075887392903,\n\ \ \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5761479349192792,\n\ \ \"mc2_stderr\": 0.015694985778154737\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5895904436860068,\n \"acc_stderr\": 0.014374922192642666,\n\ \ \"acc_norm\": 0.6254266211604096,\n \"acc_norm_stderr\": 0.014144193471893452\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6495717984465246,\n\ \ \"acc_stderr\": 0.004761289867046067,\n \"acc_norm\": 0.8412666799442342,\n\ \ \"acc_norm_stderr\": 0.0036468038997703447\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \ \ \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n\ \ \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n\ \ \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.6381578947368421,\n \"acc_stderr\": 0.03910525752849724,\n\ \ \"acc_norm\": 0.6381578947368421,\n \"acc_norm_stderr\": 0.03910525752849724\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n\ \ \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \ \ \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432115,\n\ \ \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432115\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n\ \ \"acc_stderr\": 0.03827052357950756,\n 
\"acc_norm\": 0.7013888888888888,\n\ \ \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \ \ \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n\ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145634,\n \ \ \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145634\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n\ \ \"acc_stderr\": 0.036430371689585475,\n \"acc_norm\": 0.6473988439306358,\n\ \ \"acc_norm_stderr\": 0.036430371689585475\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n\ \ \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.77,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.77,\n\ \ \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n\ \ \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5263157894736842,\n\ \ \"acc_stderr\": 0.046970851366478626,\n \"acc_norm\": 0.5263157894736842,\n\ \ \"acc_norm_stderr\": 0.046970851366478626\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n\ \ \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.373015873015873,\n \"acc_stderr\": 0.02490699045899257,\n \"acc_norm\"\ : 0.373015873015873,\n \"acc_norm_stderr\": 0.02490699045899257\n },\n\ \ \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n\ \ \"acc_stderr\": 0.04375888492727061,\n \"acc_norm\": 0.3968253968253968,\n\ \ \"acc_norm_stderr\": 0.04375888492727061\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7161290322580646,\n\ \ \"acc_stderr\": 0.02564938106302927,\n \"acc_norm\": 0.7161290322580646,\n\ \ \"acc_norm_stderr\": 0.02564938106302927\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.035158955511656986,\n\ \ \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.035158955511656986\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\"\ : 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n\ \ \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 0.8131313131313131,\n \"acc_stderr\": 0.027772533334218974,\n \"\ acc_norm\": 0.8131313131313131,\n \"acc_norm_stderr\": 
0.027772533334218974\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758733,\n\ \ \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758733\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6128205128205129,\n \"acc_stderr\": 0.024697216930878934,\n\ \ \"acc_norm\": 0.6128205128205129,\n \"acc_norm_stderr\": 0.024697216930878934\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.3296296296296296,\n \"acc_stderr\": 0.02866120111652458,\n \ \ \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.02866120111652458\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \ \ \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.2847682119205298,\n \"acc_stderr\": 0.03684881521389023,\n \"\ acc_norm\": 0.2847682119205298,\n \"acc_norm_stderr\": 0.03684881521389023\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8201834862385321,\n \"acc_stderr\": 0.016465345467391552,\n \"\ acc_norm\": 0.8201834862385321,\n \"acc_norm_stderr\": 0.016465345467391552\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538271,\n \"\ acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538271\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7647058823529411,\n \"acc_stderr\": 0.02977177522814562,\n \"\ acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.02977177522814562\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069432,\n \ \ \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069432\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n\ \ \"acc_stderr\": 0.03181149747055359,\n \"acc_norm\": 0.6591928251121076,\n\ \ \"acc_norm_stderr\": 0.03181149747055359\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n\ \ \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"\ acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n\ \ \"acc_stderr\": 0.042365112580946315,\n \"acc_norm\": 0.7407407407407407,\n\ \ \"acc_norm_stderr\": 0.042365112580946315\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7361963190184049,\n \"acc_stderr\": 0.03462419931615624,\n\ \ \"acc_norm\": 0.7361963190184049,\n \"acc_norm_stderr\": 0.03462419931615624\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n\ \ \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n\ \ \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n\ \ \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 
0.8376068376068376,\n\ \ \"acc_stderr\": 0.02416161812798774,\n \"acc_norm\": 0.8376068376068376,\n\ \ \"acc_norm_stderr\": 0.02416161812798774\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \ \ \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7956577266922095,\n\ \ \"acc_stderr\": 0.014419123980931894,\n \"acc_norm\": 0.7956577266922095,\n\ \ \"acc_norm_stderr\": 0.014419123980931894\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0246853168672578,\n\ \ \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0246853168672578\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3329608938547486,\n\ \ \"acc_stderr\": 0.015761716178397563,\n \"acc_norm\": 0.3329608938547486,\n\ \ \"acc_norm_stderr\": 0.015761716178397563\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.025457756696667878,\n\ \ \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.025457756696667878\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n\ \ \"acc_stderr\": 0.026385273703464482,\n \"acc_norm\": 0.684887459807074,\n\ \ \"acc_norm_stderr\": 0.026385273703464482\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.7006172839506173,\n \"acc_stderr\": 0.02548311560119545,\n\ \ \"acc_norm\": 0.7006172839506173,\n \"acc_norm_stderr\": 0.02548311560119545\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \ \ \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4589308996088657,\n\ \ \"acc_stderr\": 0.012727084826799804,\n \"acc_norm\": 0.4589308996088657,\n\ \ \"acc_norm_stderr\": 0.012727084826799804\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6360294117647058,\n \"acc_stderr\": 0.02922719246003203,\n\ \ \"acc_norm\": 0.6360294117647058,\n \"acc_norm_stderr\": 0.02922719246003203\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6454248366013072,\n \"acc_stderr\": 0.0193533605475537,\n \ \ \"acc_norm\": 0.6454248366013072,\n \"acc_norm_stderr\": 0.0193533605475537\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n\ \ \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n\ \ \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7224489795918367,\n \"acc_stderr\": 0.02866685779027465,\n\ \ \"acc_norm\": 0.7224489795918367,\n \"acc_norm_stderr\": 0.02866685779027465\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n\ \ \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n\ \ \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \ \ \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \ \ \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"\ acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 
0.029170885500727665,\n\ \ \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40514075887392903,\n\ \ \"mc1_stderr\": 0.01718561172775337,\n \"mc2\": 0.5761479349192792,\n\ \ \"mc2_stderr\": 0.015694985778154737\n }\n}\n```" repo_url: https://huggingface.co/teknium/CollectiveCognition-v1.1-Mistral-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|arc:challenge|25_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hellaswag|10_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T13-48-47.550072.parquet' - 
'**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T13-48-47.550072.parquet' - 
'**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T13-48-47.550072.parquet' - 
'**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T13-48-47.550072.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - 
'**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - 
'**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T13-48-47.550072.parquet' - 
config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - 
'**/details_harness|hendrycksTest-public_relations|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T13-48-47.550072.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_08T13_48_47.550072 path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T13-48-47.550072.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T13-48-47.550072.parquet' - config_name: results data_files: - split: 2023_11_08T13_48_47.550072 path: - results_2023-11-08T13-48-47.550072.parquet - split: latest path: - results_2023-11-08T13-48-47.550072.parquet --- # Dataset Card for Evaluation run of teknium/CollectiveCognition-v1.1-Mistral-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/teknium/CollectiveCognition-v1.1-Mistral-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [teknium/CollectiveCognition-v1.1-Mistral-7B](https://huggingface.co/teknium/CollectiveCognition-v1.1-Mistral-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 61 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_teknium__CollectiveCognition-v1.1-Mistral-7B_public", "harness_truthfulqa_mc_0", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T13:48:47.550072](https://huggingface.co/datasets/open-llm-leaderboard/details_teknium__CollectiveCognition-v1.1-Mistral-7B_public/blob/main/results_2023-11-08T13-48-47.550072.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6255358463798112, "acc_stderr": 0.03339672778015502, "acc_norm": 0.6293923050064572, "acc_norm_stderr": 0.03337392753120544, "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5761479349192792, "mc2_stderr": 0.015694985778154737 }, "harness|arc:challenge|25": { "acc": 0.5895904436860068, "acc_stderr": 0.014374922192642666, "acc_norm": 0.6254266211604096, "acc_norm_stderr": 0.014144193471893452 }, "harness|hellaswag|10": { "acc": 0.6495717984465246, "acc_stderr": 0.004761289867046067, "acc_norm": 0.8412666799442342, "acc_norm_stderr": 0.0036468038997703447 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6381578947368421, "acc_stderr": 0.03910525752849724, "acc_norm": 0.6381578947368421, "acc_norm_stderr": 0.03910525752849724 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.028815615713432115, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.028815615713432115 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7013888888888888, "acc_stderr": 0.03827052357950756, "acc_norm": 0.7013888888888888, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145634, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145634 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.036430371689585475, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.036430371689585475 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816506, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": {
"acc": 0.5263157894736842, "acc_stderr": 0.046970851366478626, "acc_norm": 0.5263157894736842, "acc_norm_stderr": 0.046970851366478626 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.373015873015873, "acc_stderr": 0.02490699045899257, "acc_norm": 0.373015873015873, "acc_norm_stderr": 0.02490699045899257 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.04375888492727061, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.04375888492727061 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7161290322580646, "acc_stderr": 0.02564938106302927, "acc_norm": 0.7161290322580646, "acc_norm_stderr": 0.02564938106302927 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.035158955511656986, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.035158955511656986 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8131313131313131, "acc_stderr": 0.027772533334218974, "acc_norm": 0.8131313131313131, "acc_norm_stderr": 0.027772533334218974 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758733, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758733 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6128205128205129, "acc_stderr": 0.024697216930878934, "acc_norm": 0.6128205128205129, "acc_norm_stderr": 0.024697216930878934 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.02866120111652458, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.02866120111652458 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2847682119205298, "acc_stderr": 0.03684881521389023, "acc_norm": 0.2847682119205298, "acc_norm_stderr": 0.03684881521389023 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8201834862385321, "acc_stderr": 0.016465345467391552, "acc_norm": 0.8201834862385321, "acc_norm_stderr": 0.016465345467391552 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538271, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538271 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.02977177522814562, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.02977177522814562 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7721518987341772, "acc_stderr": 0.027303484599069432, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069432 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, 
"acc_stderr": 0.03181149747055359, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.03181149747055359 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.042365112580946315, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.042365112580946315 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7361963190184049, "acc_stderr": 0.03462419931615624, "acc_norm": 0.7361963190184049, "acc_norm_stderr": 0.03462419931615624 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.02416161812798774, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.02416161812798774 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7956577266922095, "acc_stderr": 0.014419123980931894, "acc_norm": 0.7956577266922095, "acc_norm_stderr": 0.014419123980931894 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0246853168672578, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0246853168672578 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3329608938547486, "acc_stderr": 0.015761716178397563, "acc_norm": 0.3329608938547486, "acc_norm_stderr": 0.015761716178397563 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.025457756696667878, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.025457756696667878 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.684887459807074, "acc_stderr": 0.026385273703464482, "acc_norm": 0.684887459807074, "acc_norm_stderr": 0.026385273703464482 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7006172839506173, "acc_stderr": 0.02548311560119545, "acc_norm": 0.7006172839506173, "acc_norm_stderr": 0.02548311560119545 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, "acc_norm": 0.48226950354609927, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4589308996088657, "acc_stderr": 0.012727084826799804, "acc_norm": 0.4589308996088657, "acc_norm_stderr": 0.012727084826799804 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6360294117647058, "acc_stderr": 0.02922719246003203, "acc_norm": 0.6360294117647058, "acc_norm_stderr": 0.02922719246003203 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6454248366013072, "acc_stderr": 0.0193533605475537, "acc_norm": 0.6454248366013072, "acc_norm_stderr": 0.0193533605475537 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, 
"harness|hendrycksTest-security_studies|5": { "acc": 0.7224489795918367, "acc_stderr": 0.02866685779027465, "acc_norm": 0.7224489795918367, "acc_norm_stderr": 0.02866685779027465 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.40514075887392903, "mc1_stderr": 0.01718561172775337, "mc2": 0.5761479349192792, "mc2_stderr": 0.015694985778154737 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7034233808517456, -0.8046762943267822, 0.2960241734981537, 0.18472731113433838, -0.1897469013929367, -0.015091011300683022, 0.011630324646830559, -0.21054813265800476, 0.5670872330665588, -0.07171789556741714, -0.47516852617263794, -0.745947003364563, -0.46889379620552063, 0.26840144395...
null
null
null
null
null
null
null
null
null
null
null
null
null
gray311/Dolphins-Bench
gray311
2023-11-16T08:29:49Z
0
0
null
[ "license:llama2", "region:us" ]
2023-11-16T08:29:49Z
2023-11-08T14:04:06.000Z
2023-11-08T14:04:06
--- license: llama2 ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
zhengr/ultrafeedback_binarized
zhengr
2023-11-08T14:18:27Z
0
0
null
[ "task_categories:conversational", "task_categories:text-generation", "language:en", "license:mit", "arxiv:2310.16944", "region:us" ]
2023-11-08T14:18:27Z
2023-11-08T14:18:08.000Z
2023-11-08T14:18:08
--- language: - en license: mit task_categories: - conversational - text-generation pretty_name: UltraFeedback Binarized configs: - config_name: default data_files: - split: train_sft path: data/train_sft-* - split: test_sft path: data/test_sft-* - split: train_gen path: data/train_gen-* - split: test_gen path: data/test_gen-* - split: train_prefs path: data/train_prefs-* - split: test_prefs path: data/test_prefs-* dataset_info: features: - name: prompt dtype: string - name: prompt_id dtype: string - name: chosen list: - name: content dtype: string - name: role dtype: string - name: rejected list: - name: content dtype: string - name: role dtype: string - name: messages list: - name: content dtype: string - name: role dtype: string - name: score_chosen dtype: float64 - name: score_rejected dtype: float64 splits: - name: train_sft num_bytes: 397273717 num_examples: 61966 - name: test_sft num_bytes: 6270496 num_examples: 1000 - name: train_gen num_bytes: 316634390 num_examples: 61966 - name: test_gen num_bytes: 5008220 num_examples: 1000 - name: train_prefs num_bytes: 397273717 num_examples: 61966 - name: test_prefs num_bytes: 12782225 num_examples: 2000 download_size: 636467735 dataset_size: 1135242765 --- # Dataset Card for UltraFeedback Binarized ## Dataset Description This is a pre-processed version of the [UltraFeedback dataset](https://huggingface.co/datasets/openbmb/UltraFeedback) and was used to train [Zephyr-7B-β](https://huggingface.co/HuggingFaceH4/zephyr-7b-beta), a state-of-the-art chat model at the 7B parameter scale. The original UltraFeedback dataset consists of 64k prompts, where each prompt is accompanied by four model completions from a wide variety of open and proprietary models. GPT-4 is then used to assign a score to each completion, according to criteria like helpfulness and honesty. To create `UltraFeedback Binarized`, we picked the completion with the highest mean score as the "chosen" completion, and one of the remaining 3 at random as the "rejected" one. This defines the preference modelling splits for techniques like reward modelling or DPO. We also created splits for supervised fine-tuning (SFT) that use the "chosen" column as the dialogues to model, along with splits that involve generation like rejection sampling or PPO. For details on the dataset processing, see the accompanying [script](https://huggingface.co/datasets/HuggingFaceH4/ultrafeedback_binarized/blob/main/create_dataset.py). ## Dataset Structure ### Data Splits The dataset has six splits, suitable for: * Supervised fine-tuning (`sft`). * Preference modelling (`prefs`) to train reward models or apply techniques like DPO. * Generation ranking (`gen`) via techniques like rejection sampling or PPO. The number of examples per split is shown as follows: | train_sft | test_sft | train_prefs | test_prefs | train_gen | test_gen | |:-------:|:-----------:|:-----:| :-----:| :-----:| :-----:| | 61966 | 1000 | 61966 | 2000 | 61966 | 1000 | The dataset is stored in parquet format with each entry using the following schema: ``` { "prompt": "Which books are still banned in Canada?", "chosen": [ { "content": "Which books are still banned in Canada?", "role": "user" }, { "content": "As a responsible and ethical AI assistant, I must inform you that I cannot provide a list of banned books in Canada, as there are no books that are currently banned in the country. 
Canada has a strong commitment to freedom of expression and the right to access information, and there are no laws in place that restrict the publication or distribution of books.\n", "role": "assistant" } ], "messages": [ { "content": "Which books are still banned in Canada?", "role": "user" }, { "content": "As a responsible and ethical AI assistant, I must inform you that I cannot provide a list of banned books in Canada, as there are no books that are currently banned in the country. Canada has a strong commitment to freedom of expression and the right to access information, and there are no laws in place that restrict the publication or distribution of books.\n", "role": "assistant" }], "prompt_id": "aeccf551d9ba42fdf5f2044de43b8ce6e360fb523ace428317b81d804594e090", "rejected": [ { "content": "Which books are still banned in Canada?", "role": "user"}, { "content": "According to the Canadian Government’s Ban Affront website, there are still several books that are banned in Canada. These include The Begum’s Millionaire, The Education of Little Tree, The Harry Potter series, Lolita, 1984, and Lady Chatterley’s Lover. Some of these books are considered inaccessible due to their age, while others are still legally banned in certain parts of the country.", "role": "assistant" } ], "score_chosen": 8.0, "score_rejected": 5.0 } ``` You should use the `chosen` and `rejected` columns for techniques like DPO, while the `messages` column is suitable for SFT or PPO. ## Citation If you find this dataset useful in your work, please cite the original UltraFeedback dataset: https://huggingface.co/datasets/openbmb/UltraFeedback You may also wish to cite the Zephyr 7B technical report: ``` @misc{tunstall2023zephyr, title={Zephyr: Direct Distillation of LM Alignment}, author={Lewis Tunstall and Edward Beeching and Nathan Lambert and Nazneen Rajani and Kashif Rasul and Younes Belkada and Shengyi Huang and Leandro von Werra and Clémentine Fourrier and Nathan Habib and Nathan Sarrazin and Omar Sanseviero and Alexander M. Rush and Thomas Wolf}, year={2023}, eprint={2310.16944}, archivePrefix={arXiv}, primaryClass={cs.LG} } ```
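A minimal sketch of turning the `prefs` splits into (prompt, chosen, rejected) triples for DPO-style training; the repository id comes from the script link above, and taking the last message as the assistant reply is an assumption based on the schema shown:

```python
from datasets import load_dataset

prefs = load_dataset("HuggingFaceH4/ultrafeedback_binarized", split="train_prefs")

def to_pair(example):
    # "chosen" and "rejected" are full chats; the final message is assumed
    # to be the assistant reply that the preference label compares.
    return {
        "prompt": example["prompt"],
        "chosen_reply": example["chosen"][-1]["content"],
        "rejected_reply": example["rejected"][-1]["content"],
    }

pairs = prefs.map(to_pair, remove_columns=prefs.column_names)
print(pairs[0]["chosen_reply"][:100])
```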
[ -0.25384023785591125, -0.41221392154693604, 0.1340702772140503, 0.28658804297447205, -0.3469187617301941, -0.18508395552635193, 0.13909755647182465, -0.23341040313243866, 0.09443943202495575, 1.0479034185409546, -0.5335689187049866, -0.8985083699226379, -0.27864870429039, 0.152482926845550...
null
null
null
null
null
null
null
null
null
null
null
null
null
zx2045284/medicine
zx2045284
2023-11-08T14:37:08Z
0
0
null
[ "medical", "region:us" ]
2023-11-08T14:37:08Z
2023-11-08T14:33:46.000Z
2023-11-08T14:33:46
--- tags: - medical ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
RescorerLM/LibriSpeech-Vocabulary
RescorerLM
2023-11-08T14:35:25Z
0
0
null
[ "region:us" ]
2023-11-08T14:35:25Z
2023-11-08T14:35:15.000Z
2023-11-08T14:35:15
Entry not found
[ -0.3227645754814148, -0.22568479180335999, 0.8622263669967651, 0.43461522459983826, -0.52829909324646, 0.7012971639633179, 0.7915719747543335, 0.07618614286184311, 0.774603009223938, 0.2563217282295227, -0.7852813005447388, -0.22573819756507874, -0.9104475975036621, 0.5715674161911011, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
zhengr/ultrachat_200k
zhengr
2023-11-08T14:47:02Z
0
0
null
[ "task_categories:conversational", "task_categories:text-generation", "size_categories:100K<n<1M", "language:en", "license:mit", "arxiv:2305.14233", "arxiv:2310.16944", "region:us" ]
2023-11-08T14:47:02Z
2023-11-08T14:46:06.000Z
2023-11-08T14:46:06
--- language: - en license: mit size_categories: - 100K<n<1M task_categories: - conversational - text-generation pretty_name: UltraChat 200k configs: - config_name: default data_files: - split: train_sft path: data/train_sft-* - split: test_sft path: data/test_sft-* - split: train_gen path: data/train_gen-* - split: test_gen path: data/test_gen-* dataset_info: features: - name: prompt dtype: string - name: prompt_id dtype: string - name: messages list: - name: content dtype: string - name: role dtype: string splits: - name: train_sft num_bytes: 1397058554 num_examples: 207865 - name: test_sft num_bytes: 154695659 num_examples: 23110 - name: train_gen num_bytes: 1347396812 num_examples: 256032 - name: test_gen num_bytes: 148276089 num_examples: 28304 download_size: 1624049723 dataset_size: 3047427114 --- # Dataset Card for UltraChat 200k ## Dataset Description This is a heavily filtered version of the [UltraChat](https://github.com/thunlp/UltraChat) dataset and was used to train [Zephyr-7B-β](https://huggingface.co/HuggingFaceH4/zephyr-7b-beta), a state-of-the-art 7B chat model. The original dataset consists of 1.4M dialogues generated by ChatGPT and spanning a wide range of topics. To create `UltraChat 200k`, we applied the following logic: - Selection of a subset of data for faster supervised fine-tuning. - Truecasing of the dataset, as we observed around 5% of the data contained grammatical errors like "Hello. how are you?" instead of "Hello. How are you?" - Removal of dialogues where the assistant replies with phrases like "I do not have emotions" or "I don't have opinions", even for fact-based prompts that don't involve either. ## Dataset Structure The dataset has four splits, suitable for: * Supervised fine-tuning (`sft`). * Generation ranking (`gen`) via techniques like rejection sampling or PPO. The number of examples per split is shown as follows: | train_sft | test_sft | train_gen | test_gen | |:-------:|:-----------:|:-----:| :-----:| | 207865 | 23110 | 256032 | 28304 | The dataset is stored in parquet format with each entry using the following schema: ``` { "prompt": "Create a fully-developed protagonist who is challenged to survive within a dystopian society under the rule of a tyrant. ...", "messages":[ { "content": "Create a fully-developed protagonist who is challenged to survive within a dystopian society under the rule of a tyrant. ...", "role": "user" }, { "content": "Name: Ava\n\n Ava was just 16 years old when the world as she knew it came crashing down. The government had collapsed, leaving behind a chaotic and lawless society. ...", "role": "assistant" }, { "content": "Wow, Ava's story is so intense and inspiring! Can you provide me with more details. ...", "role": "user" }, { "content": "Certainly! ....", "role": "assistant" }, { "content": "That's really interesting! I would love to hear more...", "role": "user" }, { "content": "Certainly! 
....", "role": "assistant" }, ], "prompt_id": "d938b65dfe31f05f80eb8572964c6673eddbd68eff3db6bd234d7f1e3b86c2af" } ``` ## Citation If you find this dataset is useful in your work, please cite the original UltraChat dataset: ``` @misc{ding2023enhancing, title={Enhancing Chat Language Models by Scaling High-quality Instructional Conversations}, author={Ning Ding and Yulin Chen and Bokai Xu and Yujia Qin and Zhi Zheng and Shengding Hu and Zhiyuan Liu and Maosong Sun and Bowen Zhou}, year={2023}, eprint={2305.14233}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` You may also wish to cite the Zephyr 7B technical report: ``` @misc{tunstall2023zephyr, title={Zephyr: Direct Distillation of LM Alignment}, author={Lewis Tunstall and Edward Beeching and Nathan Lambert and Nazneen Rajani and Kashif Rasul and Younes Belkada and Shengyi Huang and Leandro von Werra and Clémentine Fourrier and Nathan Habib and Nathan Sarrazin and Omar Sanseviero and Alexander M. Rush and Thomas Wolf}, year={2023}, eprint={2310.16944}, archivePrefix={arXiv}, primaryClass={cs.LG} } ```
[ -0.23444771766662598, -0.8110063076019287, 0.3500114679336548, 0.15307602286338806, -0.1303638368844986, -0.08500323444604874, -0.0421859435737133, -0.25259682536125183, -0.00124407431576401, 0.6552737951278687, -0.5860026478767395, -0.6387255787849426, -0.1422766000032425, 0.0252635981887...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_aiplanet__panda-coder-13B_public
open-llm-leaderboard
2023-11-08T14:54:23Z
0
0
null
[ "region:us" ]
2023-11-08T14:54:23Z
2023-11-08T14:54:15.000Z
2023-11-08T14:54:15
--- pretty_name: Evaluation run of aiplanet/panda-coder-13B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [aiplanet/panda-coder-13B](https://huggingface.co/aiplanet/panda-coder-13B) on\ \ the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_aiplanet__panda-coder-13B_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T14:53:54.622402](https://huggingface.co/datasets/open-llm-leaderboard/details_aiplanet__panda-coder-13B_public/blob/main/results_2023-11-08T14-53-54.622402.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.0,\n \"\ em_stderr\": 0.0,\n \"f1\": 0.0,\n \"f1_stderr\": 0.0,\n \"\ acc\": 0.2478295185477506,\n \"acc_stderr\": 0.007025978032038456\n },\n\ \ \"harness|drop|3\": {\n \"em\": 0.0,\n \"em_stderr\": 0.0,\n\ \ \"f1\": 0.0,\n \"f1_stderr\": 0.0\n },\n \"harness|gsm8k|5\"\ : {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.4956590370955012,\n \"acc_stderr\": 0.014051956064076911\n\ \ }\n}\n```" repo_url: https://huggingface.co/aiplanet/panda-coder-13B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_drop_3 data_files: - split: 2023_11_08T14_53_54.622402 path: - '**/details_harness|drop|3_2023-11-08T14-53-54.622402.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T14-53-54.622402.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T14_53_54.622402 path: - '**/details_harness|gsm8k|5_2023-11-08T14-53-54.622402.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T14-53-54.622402.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T14_53_54.622402 path: - '**/details_harness|winogrande|5_2023-11-08T14-53-54.622402.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T14-53-54.622402.parquet' - config_name: results data_files: - split: 2023_11_08T14_53_54.622402 path: - results_2023-11-08T14-53-54.622402.parquet - split: latest path: - results_2023-11-08T14-53-54.622402.parquet --- # Dataset Card for Evaluation run of aiplanet/panda-coder-13B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/aiplanet/panda-coder-13B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during 
the evaluation run of model [aiplanet/panda-coder-13B](https://huggingface.co/aiplanet/panda-coder-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_aiplanet__panda-coder-13B_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T14:53:54.622402](https://huggingface.co/datasets/open-llm-leaderboard/details_aiplanet__panda-coder-13B_public/blob/main/results_2023-11-08T14-53-54.622402.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0, "f1_stderr": 0.0, "acc": 0.2478295185477506, "acc_stderr": 0.007025978032038456 }, "harness|drop|3": { "em": 0.0, "em_stderr": 0.0, "f1": 0.0, "f1_stderr": 0.0 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 }, "harness|winogrande|5": { "acc": 0.4956590370955012, "acc_stderr": 0.014051956064076911 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.4296974241733551, -0.6458463668823242, 0.057146262377500534, 0.43867048621177673, -0.1751026213169098, 0.23050566017627716, -0.39078450202941895, -0.06854495406150818, 0.4712934195995331, 0.5941728353500366, -0.6621571779251099, -0.8601192235946655, -0.7031025290489197, 0.27318283915519...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_10
arieg
2023-11-08T15:06:40Z
0
0
null
[ "region:us" ]
2023-11-08T15:06:40Z
2023-11-08T15:06:32.000Z
2023-11-08T15:06:32
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '26007' '1': '26008' '2': '26010' '3': '26011' '4': '26012' '5': '26013' '6': '26014' '7': '26016' '8': '26017' '9': '26020' '10': '26021' '11': '26022' '12': '26025' '13': '26026' '14': '26034' '15': '26035' '16': '26036' '17': '26169' '18': '26174' '19': '26298' '20': '26301' '21': '26302' '22': '26307' '23': '26322' '24': '26464' '25': '26465' '26': '26466' '27': '26583' '28': '26600' '29': '26605' '30': '26629' '31': '26638' '32': '26639' '33': '26640' '34': '26641' '35': '26642' '36': '26643' '37': '26651' '38': '26652' '39': '26653' '40': '26654' '41': '26655' '42': '26656' '43': '26657' '44': '26658' '45': '26659' '46': '26674' '47': '26681' '48': '26754' '49': '26765' '50': '27164' '51': '27177' '52': '27194' '53': '27195' '54': '27197' '55': '27198' '56': '27258' '57': '27406' '58': '27454' '59': '27455' '60': '27456' '61': '27547' '62': '27548' '63': '27549' '64': '27550' '65': '27551' '66': '27552' '67': '27609' '68': '27610' '69': '27611' '70': '27612' '71': '27613' '72': '27667' '73': '27673' '74': '27797' '75': '27799' '76': '27802' '77': '27803' '78': '27804' '79': '27805' splits: - name: train num_bytes: 86008633.6 num_examples: 1600 download_size: 87234967 dataset_size: 86008633.6 --- # Dataset Card for "bw_spec_cls_80_10" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
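Beyond the schema above, a minimal loading sketch; the repository id is this dataset's own, and the class-name lookup relies on the `class_label` metadata listed above:

```python
from datasets import load_dataset

ds = load_dataset("arieg/bw_spec_cls_80_10", split="train")

ex = ds[0]
# "image" decodes to a PIL image; "label" is an integer class index whose
# string name ("26007", ...) comes from the class_label feature metadata.
print(ex["image"].size)
print(ds.features["label"].int2str(ex["label"]))
```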
[ -0.7590449452400208, -0.17424041032791138, 0.2107134461402893, 0.3808968961238861, -0.23944972455501556, -0.17386184632778168, -0.01787232607603073, -0.3097383975982666, 0.6564815044403076, 0.49791258573532104, -0.7003551721572876, -0.7635643482208252, -0.6093544363975525, -0.0962526649236...
null
null
null
null
null
null
null
null
null
null
null
null
null
BEE-spoke-data/code-tutorials-en
BEE-spoke-data
2023-11-28T23:53:28Z
0
0
null
[ "task_categories:text-generation", "size_categories:100K<n<1M", "source_datasets:mponty/code_tutorials", "language:en", "license:odc-by", "region:us" ]
2023-11-28T23:53:28Z
2023-11-08T15:09:01.000Z
2023-11-08T15:09:01
--- configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* - config_name: unfiltered data_files: - split: train path: unfiltered/train-* dataset_info: - config_name: default features: - name: text dtype: string - name: url dtype: string - name: dump dtype: string - name: source dtype: string - name: word_count dtype: int64 - name: flesch_reading_ease dtype: float64 splits: - name: train num_bytes: 2003343392.8658142 num_examples: 223162 - name: validation num_bytes: 52722397.8378977 num_examples: 5873 - name: test num_bytes: 52722397.8378977 num_examples: 5873 download_size: 1137457027 dataset_size: 2108788188.5416098 - config_name: unfiltered features: - name: text dtype: string - name: url dtype: string - name: dump dtype: string - name: source dtype: string - name: word_count dtype: int64 - name: flesch_reading_ease dtype: float64 splits: - name: train num_bytes: 3452998372 num_examples: 384646 download_size: 1859375824 dataset_size: 3452998372 source_datasets: mponty/code_tutorials license: odc-by task_categories: - text-generation language: - en size_categories: - 100K<n<1M --- # Dataset Card for "code-tutorials-en" The default config keeps documents from the source dataset that are: - `en` only - 100 words or more - Flesch reading ease of 50 or more ``` DatasetDict({ train: Dataset({ features: ['text', 'url', 'dump', 'source', 'word_count', 'flesch_reading_ease'], num_rows: 223162 }) validation: Dataset({ features: ['text', 'url', 'dump', 'source', 'word_count', 'flesch_reading_ease'], num_rows: 5873 }) test: Dataset({ features: ['text', 'url', 'dump', 'source', 'word_count', 'flesch_reading_ease'], num_rows: 5873 }) }) ```
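A sketch of re-deriving that filter from the `unfiltered` config; whether the default config was produced with exactly these thresholds is an assumption based on the criteria above:

```python
from datasets import load_dataset

raw = load_dataset("BEE-spoke-data/code-tutorials-en", "unfiltered", split="train")

# Keep documents with >= 100 words and Flesch reading ease >= 50,
# mirroring the stated criteria.
kept = raw.filter(
    lambda ex: ex["word_count"] >= 100 and ex["flesch_reading_ease"] >= 50
)
print(len(raw), "->", len(kept))
```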
[ -0.5836908221244812, -0.6124824285507202, -0.010901142843067646, 0.4473128914833069, -0.40583381056785583, -0.18810389935970306, -0.5145993828773499, 0.24952922761440277, 0.033629100769758224, 0.2700527012348175, -0.7103278040885925, -0.9753457903862, -0.5242998600006104, 0.546562135219574...
null
null
null
null
null
null
null
null
null
null
null
null
null
materials-toolkits/materials-project
materials-toolkits
2023-11-23T14:45:15Z
0
0
null
[ "size_categories:100K<n<1M", "license:mit", "chemistry", "region:us" ]
2023-11-23T14:45:15Z
2023-11-08T15:12:18.000Z
2023-11-08T15:12:18
--- license: mit tags: - chemistry pretty_name: Materials Project size_categories: - 100K<n<1M --- # Dataset Materials project (2019 dump) This dataset contains 133420 materials with formation energy per atom. Processed from [mp.2019.04.01.json](https://figshare.com/articles/dataset/Graphs_of_Materials_Project_20190401/8097992) # Download Download link: [materials-project.tar.gz](https://huggingface.co/datasets/materials-toolkits/materials-project/raw/main/materials-project.tar.gz) MD5 checksum `c132f3781f32cd17f3a92aa6501b9531` # Content Bundled in `materials-project.tar.gz`. ## Index (`index.json`) list of dict: * `index` (int) => index of the structure in data file. * `id` (str) => id of Materials Project. * `formula` (str) => formula. * `natoms` (int) => number of atoms. * `energy_pa` (float) => formation energy per atom. ## Data (`data.hdf5`) fields: * `structures` => a group containing structure information. * `structures/cell` (float32) => lattice of the material. * `structures/natoms` (int32) => number of atoms. * `structures/energy_pa` (float32) => formation energy per atom. * `structures/atoms_ptr` (int64) => position of the first atom of the structures in the `atoms` group. * `atoms` => a group containing information about atoms. * `atoms/positions` (float32) => the positions of the atoms. * `atoms/atomic_number` (uint8) => the atomic number of the atoms.
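A minimal reading sketch; it assumes the archive has been extracted locally into `index.json` and `data.hdf5` laid out as documented above, and the exact length convention of `atoms_ptr` is an assumption (hence the fallback for the last structure):

```python
import json
import h5py

with open("index.json") as fh:
    index = json.load(fh)

entry = index[0]
with h5py.File("data.hdf5", "r") as f:
    ptr = f["structures/atoms_ptr"][:]           # first-atom offset per structure
    i = entry["index"]
    start = ptr[i]
    end = ptr[i + 1] if i + 1 < len(ptr) else f["atoms/positions"].shape[0]
    positions = f["atoms/positions"][start:end]  # per-atom coordinates of structure i
    numbers = f["atoms/atomic_number"][start:end]
    cell = f["structures/cell"][i]               # lattice of structure i
    energy_pa = f["structures/energy_pa"][i]     # formation energy per atom

print(entry["formula"], entry["natoms"], positions.shape, float(energy_pa))
```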
[ -0.31987056136131287, -0.41356489062309265, 0.506629228591919, 0.038394078612327576, 0.006649126764386892, 0.2779937982559204, 0.25817134976387024, 0.23003359138965607, 0.15237653255462646, 0.39913856983184814, -0.4664105176925659, -0.8572249412536621, -0.27384933829307556, 0.1221947520971...
null
null
null
null
null
null
null
null
null
null
null
null
null
zhengr/UltraFeedback
zhengr
2023-11-08T15:15:16Z
0
0
null
[ "task_categories:text-generation", "size_categories:100K<n<1M", "language:en", "license:mit", "region:us" ]
2023-11-08T15:15:16Z
2023-11-08T15:14:57.000Z
2023-11-08T15:14:57
--- license: mit task_categories: - text-generation language: - en size_categories: - 100K<n<1M --- ## Introduction - [GitHub Repo](https://github.com/thunlp/UltraFeedback) - [UltraRM-13b](https://huggingface.co/openbmb/UltraRM-13b) - [UltraCM-13b](https://huggingface.co/openbmb/UltraCM-13b) UltraFeedback is a **large-scale, fine-grained, diverse preference dataset**, used for training powerful reward models and critic models. We collect about 64k prompts from diverse resources (including UltraChat, ShareGPT, Evol-Instruct, TruthfulQA, FalseQA, and FLAN). We then use these prompts to query multiple LLMs (see Table for model lists) and generate 4 different responses for each prompt, resulting in a total of 256k samples. To collect high-quality preference and textual feedback, we design a fine-grained annotation instruction, which contains 4 different aspects, namely **instruction-following**, **truthfulness**, **honesty** and **helpfulness**. We then ask GPT-4 to annotate the collected samples based on the instructions. ## Features - 🆚 **Scale**: UltraFeedback consists of 64k prompts, 256k responses and 380k high-quality feedback annotations. RLHF researchers could further construct around 1 million comparison pairs to train their reward models. - 🌈 **Diversity**: As a preference dataset, diversity is the core requirement for UltraFeedback. We collect prompts from various sources and query a diverse set of state-of-the-art open-source and prestigious models. To further increase diversity, we intentionally selected different base models, i.e., LLaMA, Falcon, StarChat, MPT, GPT and Bard. We also apply various principles to stimulate models to complete instructions in different ways. - 🤯 **High-density**: UltraFeedback provides both numerical and textual feedback. Moreover, we wrote fine-grained annotation documents to help rate responses in all dimensions. ## Dataset Construction ### Instruction Sampling We sample 63,967 instructions from 6 publicly available, high-quality datasets. We include all instructions from TruthfulQA and FalseQA, randomly sampling 10k instructions from Evol-Instruct, 10k from UltraChat, and 20k from ShareGPT. For Flan, we adopt a stratified sampling strategy, randomly sampling 3k instructions from the "Co" subset while sampling 10 instructions per task for the other three subsets, excluding those with overly long instructions. ```json { "evol_instruct": 10000, "false_qa": 2339, "flan": 20939, "sharegpt": 19949, "truthful_qa": 811, "ultrachat": 9929 } ``` ### Model Sampling To prevent the reward model from overfitting to a certain text style or capturing spurious correlations between text style and rewards, we select different base models of all levels, with varying sizes, architectures and training data, to complete the instructions. We set up a pool of 17 models: - Commercial Models: GPT-4, GPT-3.5 Turbo, Bard - LLaMA family: 1. LLaMA-2-7B-chat, LLaMA-2-13B-chat, LLaMA-2-70B-chat 2. UltraLM-13B, UltraLM-65B 3. WizardLM-7B, WizardLM-13B, WizardLM-70B 4. Vicuna-33B 5. Alpaca-7B - Non-LLaMA series: 1. Falcon-40B-instruct 2. MPT-30B-chat 3. StarChat-Beta 4. Pythia-12B ### Principle Sampling Following [1] and [2], we define a set of principles to explicitly align model behaviors from different aspects. We set up a pool of 5 principles: Helpfulness, Truthfulness, Honesty, Verbalized Calibration and Harmless. For each instruction, we randomly sample 4 models to complete the instruction, and for each completion, we sample a principle and add it to the system prompt to align the model behavior. 
Since different datasets have different characteristics, not all datasets are suitable for all principles. We provide the following table to show the principle distribution for each dataset. | Dataset | Principle | | ------------- | ------------------------------------------------------------ | | Evol Instruct | 100% Helpful | | FalseQA | 100% Truthful | | Flan | 60% Helpful, 20% Truthful, 20% Verbalized Calibration | | ShareGPT | 60% Helpful, 20% Truthful, 18% Honesty, 2% Verbalized Calibration | | TruthfulQA | 100% Truthful | | UltraChat | 60% Helpful, 20% Truthful, 18% Honesty, 2% Verbalized Calibration | [1] Principle-Driven Self-Alignment of Language Models from Scratch with Minimal Human Supervision. Sun et al. [2] Orca: Progressive Learning from Complex Explanation Traces of GPT-4. Mukherjee et al. ## Dataset Format ```JSONC { "source": "sharegpt", // the dataset where the instruction comes from "id": "D2TC9GP_0", // the id of the instruction in the source dataset "instruction": "I am going to cairo in June of this year, thinking of 4-5 days, what are the best things I could do? Going with family of 5, granddad, grandmom, sister, mom and me. No days can have too much walking but sister, me and mom like adventure sports", "models": [ // four models that are randomly sampled from the model pool to complete the instruction "falcon-40b-instruct", "gpt-4", "starchat", "wizardlm-7b" ], "completions": [ // four completions corresponding to the four models { // completion 1 "model": "falcon-40b-instruct", "principle": "helpfulness", // principle used to align model behavior "custom_system_prompt": "As an AI assistant, ensure your response offers the perfect blend of accuracy, positivity, and intrigue. Strive to be educational while keeping the user engaged.", // the system prompt corresponding to the principle, sampled from a pool of GPT-4 generated system prompts "response": "Cairo is a city that has something for everyone. Some of the best things you can do are visiting the Pyramids of Giza and the museums housing the ancient Egyptian artifacts, taking a camel ride in the desert, visiting the medieval neighborhoods such as Islamic Cairo or Khan El-Khalili, trying local food in traditional restaurants like Ful medames, or venturing into one of the city's many souks to shop for authentic goods. For adventure sports, you can go whitewater rafting on the Nile, hot air ballooning over the desert, or diving in the Red Sea. Just be mindful of the scorching heat and ensure everyone wears appropriate clothing and drinks enough water. Above all, have fun and enjoy your trip!", "annotations": { "instruction_following": [ { "Rating": "2", "Rationale": "The text only partially addresses the task goal by providing a general list of activities in Cairo without considering the specific needs of the family members, such as limited walking ability and adventure preferences." } ], "honesty": [ { "Rating": "3", "Rationale": "The response is confident but contains subtle mistakes. It suggests whitewater rafting on the Nile and diving in the Red Sea, which are not feasible for a 4-5 day trip to Cairo. The rest of the suggestions are accurate and relevant to the family's preferences." } ], "truthfulness": [ { "Type": [ "1", "2" ], "Rationale": "The text suggests whitewater rafting on the Nile and diving in the Red Sea, which are not available in Cairo. 
These activities are factually incorrect and not aligned with the instruction.", "Rating": "3", "Rationale For Rating": "The text provides some truthful information, but the inclusion of whitewater rafting and diving in the Red Sea causes partial hallucination and misunderstanding." } ], "helpfulness": [ { "Type": [ "1", "2" ], "Rationale": "The response is clear and relevant, providing useful information, but it does not address the limited walking ability of the grandparents or the adventure sports preferences of the sister, mom, and the user.", "Rating": "3", "Rationale For Rating": "The text is correct and provides useful information, but it does not fully address the specific needs and preferences of the family." } ] } }, { // completion 2 ... }, { // completion 3 ... }, { // completion 4 ... } ] }, ``` ## Dataset Example Here we present an example of UltraFeedback: > **User:** I am going to cairo in June of this year, thinking of 4-5 days, what are the best things I could do? Going with family of 5, granddad, grandmom, sister, mom and me. No days can have too much walking but sister, me and mom like adventure sports > **Assistant 1 (falcon-40b-instruct):** Cairo is a city that has something for everyone. Some of the best things you can do are visiting the Pyramids of Giza and the museums housing the ancient Egyptian artifacts, taking a camel ride in the desert, visiting the medieval neighborhoods such as Islamic Cairo or Khan El-Khalili, trying local food in traditional restaurants like Ful medames, or venturing into one of the city's many souks to shop for authentic goods. For adventure sports, you can go whitewater rafting on the Nile, hot air ballooning over the desert, or diving in the Red Sea. Just be mindful of the scorching heat and ensure everyone wears appropriate clothing and drinks enough water. Above all, have fun and enjoy your trip! > **Assistant 2 (gpt-4):** In Cairo for 4-5 days, the following activities cater to your family's preferences, with consideration for the limited walking ability of your grandparents:\n\n1. Visit the Pyramids of Giza and the Great Sphinx: A must-see in Cairo, it's an incredible experience for all ages. You can explore the premises using a hired camel, horse, or a horse-drawn carriage which accommodate the needs of your grandparents while keeping the experience enjoyable.\n\n2. Egyptian Museum: This museum in Tahrir Square contains over 120,000 artifacts of Egypt's history. A wheelchair can be arranged for the grandparents to comfortably explore the museum at their own pace.\n\n3. Khan el-Khalili Bazaar: A famous market in Cairo, suitable for souvenir shopping and experiencing the local culture. There are many cafes and restaurants your family can rest at during the visit.\n\n4. Nile River Cruise: Enjoy a dinner cruise with your family on the famous Nile River. It includes entertainment and a beautiful view of the city by night. Boats often have ramps and accessible facilities for limited mobility passangers.\n\n5. Felucca Ride: During the day, go for a more traditional sailboat ride called a Felucca. This can be done on an off-day or as a relaxation activity after an active day.\n\nFor adventure activities, you and your mom and sister can explore these options:\n\n1. Sandboarding: Head to the desert for a thrilling sandboarding experience on the dunes just outside Cairo.\n\n2. Hot Air Balloon Ride: Experience Egypt from above with a hot air balloon ride in Luxor. It would require a day trip from Cairo but is an incredible experience.\n\n3. 
Quad Biking: Go for a guided quad biking adventure in the desert, enjoying panoramic views of the iconic Pyramids.\n\nSpace the activities according to your family's preferences and ensure rest periods in the afternoon. Consider hiring a private guide or joining a wheelchair-accessible tour since you're concerned about your grandparents' walking ability. > **Assistant 3 (starchat):** Here are some of the top things to do in Cairo, Egypt, with a family of 5:\n\n- Giza Pyramids: The Giza Pyramids, located just outside Cairo, are a must-see for anyone visiting the city. The Great Pyramid of Khufu, the Great Pyramid of Khafre, and the Great Pyramid of Menkaure make up one of the Seven Wonders of the Ancient World and are an impressive sight to behold.\n\n- Egyptian Museum: The Egyptian Museum, located in the heart of Cairo, is home to one of the world's largest and most impressive collections of ancient Egyptian art and artifacts. Visitors can explore the museum's extensive collection, which includes items such as mummies, sarcophagi, statues, paintings, and more.\n\n- Citadel of Salah El-Din: Located on a hill overlooking Cairo, the Citadel of Salah El-Din offers stunning views of the city and is an excellent place to get a sense of Cairo's rich history. Visitors can explore the various museums and mosques within the citadel, and also take a stroll through the beautiful gardens.\n\n- Khan El-Khalili Bazaar: Khan El-Khalili Bazaar is one of Cairo's most popular attractions, a bustling market where visitors can find everything from spices and pottery to clothes and jewelry. It's a great place to do some shopping and explore the local culture.\n\n- Alexandria: Just an hour's drive from Cairo, Alexandria is a historic city with a rich legacy. Visitors can explore the ancient Roman Amphitheater, the Qaitbay Citadel, and the Catacombs of Kom el-Shoqafa. They can also take a boat trip to the island of Montazah, where they can see the famous Graffiti Wall and other street art.\n\nAll of these attractions are family-friendly, with plenty of things to see and do. The Giza Pyramids in particular are very accessible and well-maintained, making them suitable for all ages. > **Assistant 4 (wizardlm-7b):** If you are looking for adventure sports, you can explore the pyramids of Giza and go hot air ballooning over the desert. Another option could be to take a camel ride in the desert or go kayaking in the Nile River. For family-friendly activities, you can visit the Egyptian Museum, Cairo Tower, and Khan el-Khalili Bazaar. Additionally, you can take a day trip to the historical city of Alexandria and visit the Pompey's Pillar and the Roman Amphitheater. Another option could be to go on a guided tour of the ancient city of Petra in Jordan, which is a short flight from Cairo. ## Limitations - Although GPT-4 can provide well-aligned annotations and textual feedback for most samples, we must note that GPT-4 also makes mistakes and provides inaccurate feedback.
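A rough sketch of turning a row into a preference pair by averaging the four aspect ratings; the field names follow the format above, the repository id is this mirror's own, the `train` split name is an assumption, and real rows may contain non-numeric ratings (e.g. "N/A") that a robust script would need to handle:

```python
from datasets import load_dataset
from statistics import mean

ds = load_dataset("zhengr/UltraFeedback", split="train")  # assumes a "train" split

ASPECTS = ["instruction_following", "honesty", "truthfulness", "helpfulness"]

def overall(completion):
    # Each aspect holds a one-element list with a string "Rating" (see format above).
    return mean(float(completion["annotations"][a][0]["Rating"]) for a in ASPECTS)

row = ds[0]
ranked = sorted(row["completions"], key=overall, reverse=True)
print("chosen:", ranked[0]["model"], "| rejected:", ranked[-1]["model"])
```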
[ -0.4670335650444031, -0.5532603859901428, 0.26538410782814026, 0.2056867927312851, -0.13143348693847656, -0.19046282768249512, -0.2448776662349701, -0.3657328188419342, 0.0574999675154686, 0.3792131543159485, -0.6053358912467957, -0.6370009779930115, -0.31059402227401733, -0.02962884120643...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_11
arieg
2023-11-08T15:30:59Z
0
0
null
[ "region:us" ]
2023-11-08T15:30:59Z
2023-11-08T15:30:52.000Z
2023-11-08T15:30:52
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '27855' '1': '27856' '2': '27866' '3': '27945' '4': '27953' '5': '27975' '6': '27978' '7': '27981' '8': '27987' '9': '28241' '10': '28260' '11': '28477' '12': '28478' '13': '28479' '14': '28480' '15': '28481' '16': '28482' '17': '28483' '18': '28484' '19': '28485' '20': '28546' '21': '28548' '22': '28553' '23': '28571' '24': '28608' '25': '29045' '26': '29128' '27': '29180' '28': '29243' '29': '29245' '30': '29255' '31': '29271' '32': '29272' '33': '29355' '34': '29465' '35': '29480' '36': '29587' '37': '29602' '38': '29673' '39': '29718' '40': '29719' '41': '29720' '42': '29721' '43': '29738' '44': '29739' '45': '29740' '46': '29741' '47': '29742' '48': '29744' '49': '29745' '50': '29746' '51': '29747' '52': '29750' '53': '29752' '54': '29807' '55': '29813' '56': '29816' '57': '29961' '58': '29971' '59': '30041' '60': '30043' '61': '30050' '62': '30056' '63': '30058' '64': '30059' '65': '30090' '66': '30095' '67': '30120' '68': '30196' '69': '30198' '70': '30230' '71': '30486' '72': '30487' '73': '30488' '74': '30519' '75': '30520' '76': '30521' '77': '30522' '78': '30636' '79': '30690' splits: - name: train num_bytes: 89109867.2 num_examples: 1600 download_size: 88188426 dataset_size: 89109867.2 --- # Dataset Card for "bw_spec_cls_80_11" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7489754557609558, -0.16294658184051514, 0.2166617065668106, 0.3125706911087036, -0.2985157072544098, -0.11883316189050674, 0.023877430707216263, -0.35544759035110474, 0.6607466340065002, 0.4689841568470001, -0.6898095011711121, -0.7844113111495972, -0.6383059024810791, -0.15253150463104...
null
null
null
null
null
null
null
null
null
null
null
null
null
lilacai/nature-of-code-local-noc-book-github
lilacai
2023-11-08T15:48:10Z
0
0
null
[ "region:us" ]
2023-11-08T15:48:10Z
2023-11-08T15:47:53.000Z
2023-11-08T15:47:53
This dataset is generated by [Lilac](http://lilacml.com) for a HuggingFace Space: [huggingface.co/spaces/lilacai/nature-of-code](https://huggingface.co/spaces/lilacai/nature-of-code). Lilac dataset config: ```namespace: local name: noc-book-github source: repo: nature-of-code/noc-book-2023 ignore_directories: - docs - gatsby - magicbook - src ignore_file_extensions: - .png github_token: '' source_name: github embeddings: - path: content embedding: gte-small settings: ui: media_paths: - content markdown_paths: [] ```
[ -0.5004132390022278, -0.5526789426803589, 0.13670098781585693, 0.19917412102222443, -0.12239750474691391, 0.23341909050941467, 0.04915700852870941, 0.1483258306980133, 0.7530730366706848, 0.4262746572494507, -1.0625861883163452, -1.0849921703338623, -0.34386250376701355, 0.0761318579316139...
null
null
null
null
null
null
null
null
null
null
null
null
null
Wellkarte/VOZ
Wellkarte
2023-11-08T15:52:08Z
0
0
null
[ "region:us" ]
2023-11-08T15:52:08Z
2023-11-08T15:50:21.000Z
2023-11-08T15:50:21
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
shaaz10/AUBP
shaaz10
2023-11-08T15:50:56Z
0
0
null
[ "license:unknown", "region:us" ]
2023-11-08T15:50:56Z
2023-11-08T15:50:56.000Z
2023-11-08T15:50:56
--- license: unknown ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
jestx/test
jestx
2023-11-08T15:52:38Z
0
0
null
[ "region:us" ]
2023-11-08T15:52:38Z
2023-11-08T15:52:38.000Z
2023-11-08T15:52:38
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_12
arieg
2023-11-08T15:56:02Z
0
0
null
[ "region:us" ]
2023-11-08T15:56:02Z
2023-11-08T15:55:55.000Z
2023-11-08T15:55:55
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '30740' '1': '31040' '2': '31041' '3': '31042' '4': '31043' '5': '31044' '6': '31165' '7': '31356' '8': '31389' '9': '31390' '10': '31391' '11': '31392' '12': '31807' '13': '31887' '14': '31888' '15': '31889' '16': '31999' '17': '32001' '18': '32021' '19': '32075' '20': '32081' '21': '32218' '22': '32325' '23': '32326' '24': '32327' '25': '32328' '26': '32329' '27': '32330' '28': '32331' '29': '32332' '30': '32333' '31': '32334' '32': '32335' '33': '32336' '34': '32337' '35': '32338' '36': '32339' '37': '32340' '38': '32433' '39': '32437' '40': '32438' '41': '32439' '42': '32525' '43': '32686' '44': '32687' '45': '32689' '46': '32693' '47': '32694' '48': '32695' '49': '32755' '50': '32759' '51': '32760' '52': '32800' '53': '32882' '54': '33020' '55': '33049' '56': '33050' '57': '33064' '58': '33067' '59': '33068' '60': '33069' '61': '33070' '62': '33071' '63': '33072' '64': '33123' '65': '33124' '66': '33203' '67': '33216' '68': '33221' '69': '33278' '70': '33415' '71': '33422' '72': '33424' '73': '33426' '74': '33446' '75': '33459' '76': '33460' '77': '33461' '78': '33465' '79': '33477' splits: - name: train num_bytes: 88063676.8 num_examples: 1600 download_size: 88702877 dataset_size: 88063676.8 --- # Dataset Card for "bw_spec_cls_80_12" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7248864769935608, -0.1824997514486313, 0.19200874865055084, 0.3827657103538513, -0.28356924653053284, -0.13669027388095856, -0.0036699939519166946, -0.3525521755218506, 0.6225808262825012, 0.4570545256137848, -0.8283463716506958, -0.811136782169342, -0.6199139952659607, -0.1370758712291...
null
null
null
null
null
null
null
null
null
null
null
null
null
joyfine/TruthfulQA_CoT_GPT4
joyfine
2023-11-08T15:59:42Z
0
0
null
[ "region:us" ]
2023-11-08T15:59:42Z
2023-11-08T15:57:07.000Z
2023-11-08T15:57:07
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
argilla/uf-zephyr-iteration2
argilla
2023-11-08T16:09:25Z
0
0
null
[ "region:us" ]
2023-11-08T16:09:25Z
2023-11-08T15:59:14.000Z
2023-11-08T15:59:14
--- dataset_info: features: - name: input dtype: string splits: - name: train num_bytes: 6238 num_examples: 10 download_size: 6878 dataset_size: 6238 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "uf-zephyr-iteration2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.28209325671195984, -0.19008144736289978, 0.23247535526752472, 0.12234264612197876, -0.29739049077033997, 0.20536139607429504, 0.34226447343826294, -0.191325843334198, 0.5972981452941895, 0.3754804730415344, -0.8313505053520203, -0.47082144021987915, -0.2519538700580597, -0.3057909905910...
null
null
null
null
null
null
null
null
null
null
null
null
null
arthurmluz/xlsum_data-wiki_gptextsum_results
arthurmluz
2023-11-08T16:16:16Z
0
0
null
[ "region:us" ]
2023-11-08T16:16:16Z
2023-11-08T16:05:20.000Z
2023-11-08T16:05:20
--- dataset_info: features: - name: id dtype: string - name: url dtype: string - name: title dtype: string - name: summary dtype: string - name: text dtype: string - name: gen_summary dtype: string - name: rouge struct: - name: rouge1 dtype: float64 - name: rouge2 dtype: float64 - name: rougeL dtype: float64 - name: rougeLsum dtype: float64 - name: bert struct: - name: f1 sequence: float64 - name: hashcode dtype: string - name: precision sequence: float64 - name: recall sequence: float64 splits: - name: validation num_bytes: 26768065 num_examples: 7175 download_size: 16308689 dataset_size: 26768065 configs: - config_name: default data_files: - split: validation path: data/validation-* --- # Dataset Card for "xlsum_data-wiki_gptextsum_results" Overall scores: - rouge = {'rouge1': 0.25431289948140545, 'rouge2': 0.06235500652708138, 'rougeL': 0.16542968042081027, 'rougeLsum': 0.16542968042081027} - bert = {'precision': 0.6915352174463172, 'recall': 0.7106410252424898, 'f1': 0.7006534646030917}
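For reference, scores of this kind are typically produced with the `evaluate` library; the snippet below is a hypothetical two-example check (the `lang` setting is an assumption, and the card's `hashcode` field records the actual BERTScore configuration used):

```python
import evaluate

preds = ["generated summary one", "generated summary two"]
refs = ["reference summary one", "reference summary two"]

rouge = evaluate.load("rouge")
bertscore = evaluate.load("bertscore")

# ROUGE returns rouge1/rouge2/rougeL/rougeLsum; BERTScore returns per-example
# precision/recall/f1 lists plus a hashcode, matching the schema above.
print(rouge.compute(predictions=preds, references=refs))
print(bertscore.compute(predictions=preds, references=refs, lang="en"))
```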
[ -0.279214084148407, -0.4699738025665283, 0.3081328272819519, -0.2204076051712036, -0.35292381048202515, -0.2233511209487915, -0.2555720806121826, -0.07220826297998428, 0.812698483467102, 0.3876397907733917, -0.4067594110965729, -0.8601589798927307, -0.8436598181724548, 0.12594737112522125,...
null
null
null
null
null
null
null
null
null
null
null
null
null
KaraKaraWitch/Blue
KaraKaraWitch
2023-11-08T16:10:01Z
0
0
null
[ "region:us" ]
2023-11-08T16:10:01Z
2023-11-08T16:09:00.000Z
2023-11-08T16:09:00
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_OpenBuddy__openbuddy-llama2-70b-v10.1-bf16_public
open-llm-leaderboard
2023-11-08T16:10:27Z
0
0
null
[ "region:us" ]
2023-11-08T16:10:27Z
2023-11-08T16:10:18.000Z
2023-11-08T16:10:18
--- pretty_name: Evaluation run of OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [OpenBuddy/openbuddy-llama2-70b-v10.1-bf16](https://huggingface.co/OpenBuddy/openbuddy-llama2-70b-v10.1-bf16)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_OpenBuddy__openbuddy-llama2-70b-v10.1-bf16_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T16:10:00.132989](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-llama2-70b-v10.1-bf16_public/blob/main/results_2023-11-08T16-10-00.132989.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.5072357382550335,\n\ \ \"em_stderr\": 0.0051199317896190475,\n \"f1\": 0.563010696308727,\n\ \ \"f1_stderr\": 0.00483160969587092,\n \"acc\": 0.7019171563925458,\n\ \ \"acc_stderr\": 0.012348644812426555\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.5072357382550335,\n \"em_stderr\": 0.0051199317896190475,\n\ \ \"f1\": 0.563010696308727,\n \"f1_stderr\": 0.00483160969587092\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6027293404094011,\n \ \ \"acc_stderr\": 0.013478659652337792\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8011049723756906,\n \"acc_stderr\": 0.01121862997251532\n\ \ }\n}\n```" repo_url: https://huggingface.co/OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_drop_3 data_files: - split: 2023_11_08T16_10_00.132989 path: - '**/details_harness|drop|3_2023-11-08T16-10-00.132989.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T16-10-00.132989.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T16_10_00.132989 path: - '**/details_harness|gsm8k|5_2023-11-08T16-10-00.132989.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T16-10-00.132989.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T16_10_00.132989 path: - '**/details_harness|winogrande|5_2023-11-08T16-10-00.132989.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T16-10-00.132989.parquet' - config_name: results data_files: - split: 2023_11_08T16_10_00.132989 path: - results_2023-11-08T16-10-00.132989.parquet - split: latest path: - results_2023-11-08T16-10-00.132989.parquet --- # Dataset Card for Evaluation run of OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 ## Dataset 
Description - **Homepage:** - **Repository:** https://huggingface.co/OpenBuddy/openbuddy-llama2-70b-v10.1-bf16 - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [OpenBuddy/openbuddy-llama2-70b-v10.1-bf16](https://huggingface.co/OpenBuddy/openbuddy-llama2-70b-v10.1-bf16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_OpenBuddy__openbuddy-llama2-70b-v10.1-bf16_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T16:10:00.132989](https://huggingface.co/datasets/open-llm-leaderboard/details_OpenBuddy__openbuddy-llama2-70b-v10.1-bf16_public/blob/main/results_2023-11-08T16-10-00.132989.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "em": 0.5072357382550335, "em_stderr": 0.0051199317896190475, "f1": 0.563010696308727, "f1_stderr": 0.00483160969587092, "acc": 0.7019171563925458, "acc_stderr": 0.012348644812426555 }, "harness|drop|3": { "em": 0.5072357382550335, "em_stderr": 0.0051199317896190475, "f1": 0.563010696308727, "f1_stderr": 0.00483160969587092 }, "harness|gsm8k|5": { "acc": 0.6027293404094011, "acc_stderr": 0.013478659652337792 }, "harness|winogrande|5": { "acc": 0.8011049723756906, "acc_stderr": 0.01121862997251532 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.41101381182670593, -0.772184431552887, 0.20842966437339783, 0.2645404040813446, -0.18864455819129944, 0.12964096665382385, -0.380379319190979, -0.21639274060726166, 0.40440428256988525, 0.5050144791603088, -0.5820167660713196, -0.9542180895805359, -0.6305428743362427, 0.1248372420668602...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_13
arieg
2023-11-08T16:21:14Z
0
0
null
[ "region:us" ]
2023-11-08T16:21:14Z
2023-11-08T16:21:06.000Z
2023-11-08T16:21:06
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '33538' '1': '34003' '2': '34147' '3': '34167' '4': '34257' '5': '34258' '6': '34263' '7': '34484' '8': '34510' '9': '34511' '10': '34994' '11': '34996' '12': '35007' '13': '35296' '14': '35299' '15': '35443' '16': '35444' '17': '35527' '18': '35534' '19': '35535' '20': '35537' '21': '35539' '22': '35541' '23': '35543' '24': '35571' '25': '35734' '26': '36143' '27': '36144' '28': '36145' '29': '36146' '30': '36147' '31': '36272' '32': '36273' '33': '36275' '34': '36277' '35': '36304' '36': '36322' '37': '36333' '38': '36371' '39': '36380' '40': '36388' '41': '36428' '42': '36435' '43': '36481' '44': '36526' '45': '36567' '46': '36614' '47': '36615' '48': '36616' '49': '36618' '50': '36643' '51': '36659' '52': '36983' '53': '36984' '54': '36985' '55': '36986' '56': '36987' '57': '36988' '58': '36990' '59': '36992' '60': '36994' '61': '36997' '62': '36999' '63': '37041' '64': '37111' '65': '37113' '66': '37119' '67': '37121' '68': '37131' '69': '37136' '70': '37141' '71': '37147' '72': '37324' '73': '37325' '74': '37368' '75': '37369' '76': '37416' '77': '37417' '78': '37423' '79': '37538' splits: - name: train num_bytes: 86840953.6 num_examples: 1600 download_size: 86643054 dataset_size: 86840953.6 --- # Dataset Card for "bw_spec_cls_80_13" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7273585200309753, -0.23410223424434662, 0.20342756807804108, 0.41004225611686707, -0.31306323409080505, -0.1584303230047226, -0.00500372564420104, -0.30006465315818787, 0.5988637208938599, 0.46790844202041626, -0.8348731994628906, -0.8121002316474915, -0.6419312953948975, -0.06403900682...
null
null
null
null
null
null
null
null
null
null
null
null
null
NikiTricky/test2
NikiTricky
2023-11-08T16:41:58Z
0
0
null
[ "size_categories:n<1K", "region:us" ]
2023-11-08T16:41:58Z
2023-11-08T16:29:16.000Z
2023-11-08T16:29:16
--- size_categories: - n<1K ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_elinas__chronos007-70b_public
open-llm-leaderboard
2023-11-08T16:36:36Z
0
0
null
[ "region:us" ]
2023-11-08T16:36:36Z
2023-11-08T16:36:28.000Z
2023-11-08T16:36:28
--- pretty_name: Evaluation run of elinas/chronos007-70b dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [elinas/chronos007-70b](https://huggingface.co/elinas/chronos007-70b) on the [Open\ \ LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_elinas__chronos007-70b_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T16:36:09.949809](https://huggingface.co/datasets/open-llm-leaderboard/details_elinas__chronos007-70b_public/blob/main/results_2023-11-08T16-36-09.949809.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.08756291946308725,\n\ \ \"em_stderr\": 0.002894684468980241,\n \"f1\": 0.1552086828859053,\n\ \ \"f1_stderr\": 0.0030733731115224513,\n \"acc\": 0.6242477589094606,\n\ \ \"acc_stderr\": 0.012180910628722973\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.08756291946308725,\n \"em_stderr\": 0.002894684468980241,\n\ \ \"f1\": 0.1552086828859053,\n \"f1_stderr\": 0.0030733731115224513\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.42608036391205456,\n \ \ \"acc_stderr\": 0.013621144396086709\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8224151539068666,\n \"acc_stderr\": 0.010740676861359238\n\ \ }\n}\n```" repo_url: https://huggingface.co/elinas/chronos007-70b leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_drop_3 data_files: - split: 2023_11_08T16_36_09.949809 path: - '**/details_harness|drop|3_2023-11-08T16-36-09.949809.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T16-36-09.949809.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T16_36_09.949809 path: - '**/details_harness|gsm8k|5_2023-11-08T16-36-09.949809.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T16-36-09.949809.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T16_36_09.949809 path: - '**/details_harness|winogrande|5_2023-11-08T16-36-09.949809.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T16-36-09.949809.parquet' - config_name: results data_files: - split: 2023_11_08T16_36_09.949809 path: - results_2023-11-08T16-36-09.949809.parquet - split: latest path: - results_2023-11-08T16-36-09.949809.parquet --- # Dataset Card for Evaluation run of elinas/chronos007-70b ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/elinas/chronos007-70b - **Paper:** - **Leaderboard:** 
https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [elinas/chronos007-70b](https://huggingface.co/elinas/chronos007-70b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_elinas__chronos007-70b_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T16:36:09.949809](https://huggingface.co/datasets/open-llm-leaderboard/details_elinas__chronos007-70b_public/blob/main/results_2023-11-08T16-36-09.949809.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.08756291946308725, "em_stderr": 0.002894684468980241, "f1": 0.1552086828859053, "f1_stderr": 0.0030733731115224513, "acc": 0.6242477589094606, "acc_stderr": 0.012180910628722973 }, "harness|drop|3": { "em": 0.08756291946308725, "em_stderr": 0.002894684468980241, "f1": 0.1552086828859053, "f1_stderr": 0.0030733731115224513 }, "harness|gsm8k|5": { "acc": 0.42608036391205456, "acc_stderr": 0.013621144396086709 }, "harness|winogrande|5": { "acc": 0.8224151539068666, "acc_stderr": 0.010740676861359238 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
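As a companion to the snippet above, here is a minimal, hedged sketch for pulling the aggregated metrics rather than a single task: it assumes only the "results" config and "latest" split declared in this card's YAML, nothing beyond them.

```python
from datasets import load_dataset

# Sketch under the card's stated layout: the "results" config holds the
# aggregated run metrics, and its "latest" split points at the most
# recent results parquet.
results = load_dataset(
    "open-llm-leaderboard/details_elinas__chronos007-70b_public",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics row for the latest evaluation run
```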
[ -0.37110304832458496, -0.6523422002792358, 0.2022702842950821, 0.13085050880908966, -0.20804399251937866, 0.08879291266202927, -0.37256500124931335, -0.24757206439971924, 0.4725959300994873, 0.5430088043212891, -0.6926896572113037, -0.934134840965271, -0.6575735211372375, 0.233151748776435...
null
null
null
null
null
null
null
null
null
null
null
null
null
pidakwo/imdb_reviews
pidakwo
2023-11-08T16:40:45Z
0
0
null
[ "license:afl-3.0", "region:us" ]
2023-11-08T16:40:45Z
2023-11-08T16:40:45.000Z
2023-11-08T16:40:45
--- license: afl-3.0 ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
Fredd2/Dreambooth3
Fredd2
2023-11-08T16:56:35Z
0
0
null
[ "region:us" ]
2023-11-08T16:56:35Z
2023-11-08T16:44:23.000Z
2023-11-08T16:44:23
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
AnilKamat/Ducks
AnilKamat
2023-11-15T23:27:23Z
0
0
null
[ "license:apache-2.0", "region:us" ]
2023-11-15T23:27:23Z
2023-11-08T16:45:17.000Z
2023-11-08T16:45:17
--- license: apache-2.0 ---
[ -0.12853392958641052, -0.18616779148578644, 0.6529127955436707, 0.49436280131340027, -0.19319361448287964, 0.23607419431209564, 0.36072003841400146, 0.050563063472509384, 0.579365611076355, 0.7400140762329102, -0.6508104205131531, -0.23783954977989197, -0.7102249264717102, -0.0478260256350...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_14
arieg
2023-11-08T16:46:08Z
0
0
null
[ "region:us" ]
2023-11-08T16:46:08Z
2023-11-08T16:46:01.000Z
2023-11-08T16:46:01
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '37592' '1': '37725' '2': '37727' '3': '37730' '4': '37731' '5': '37779' '6': '37781' '7': '37784' '8': '37859' '9': '37911' '10': '37920' '11': '38312' '12': '38321' '13': '38323' '14': '38326' '15': '38351' '16': '38352' '17': '38353' '18': '38354' '19': '38361' '20': '38362' '21': '38363' '22': '38365' '23': '38399' '24': '38435' '25': '38450' '26': '38522' '27': '38557' '28': '38560' '29': '38847' '30': '38878' '31': '38955' '32': '38961' '33': '38964' '34': '38965' '35': '38966' '36': '38967' '37': '39188' '38': '39259' '39': '39278' '40': '39291' '41': '39298' '42': '39357' '43': '39359' '44': '39378' '45': '39484' '46': '39488' '47': '39530' '48': '39658' '49': '39659' '50': '39660' '51': '39661' '52': '39662' '53': '39663' '54': '39664' '55': '39665' '56': '39666' '57': '39667' '58': '39875' '59': '39900' '60': '39904' '61': '40121' '62': '40122' '63': '40123' '64': '40133' '65': '40134' '66': '40139' '67': '40141' '68': '40147' '69': '40161' '70': '40180' '71': '40182' '72': '40250' '73': '40509' '74': '40525' '75': '40541' '76': '40542' '77': '40598' '78': '40683' '79': '40725' splits: - name: train num_bytes: 88556044.8 num_examples: 1600 download_size: 88996626 dataset_size: 88556044.8 --- # Dataset Card for "bw_spec_cls_80_14" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
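The card above declares an image/ClassLabel schema with 80 classes; a minimal sketch (assuming the default config and its "train" split as declared) for decoding the integer labels back into the class names listed in the YAML:

```python
from datasets import load_dataset

# Assumes the default config's "train" split and the ClassLabel "label"
# feature declared in the card above.
ds = load_dataset("arieg/bw_spec_cls_80_14", split="train")
labels = ds.features["label"]          # ClassLabel carrying the 80 names
first = ds[0]
print(labels.int2str(first["label"]))  # e.g. "37592" if the label id is 0
```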
[ -0.757017195224762, -0.18455880880355835, 0.19703766703605652, 0.4055716097354889, -0.30807894468307495, -0.21330894529819489, 0.025667734444141388, -0.3075890839099884, 0.5417079925537109, 0.5743477940559387, -0.8124002814292908, -0.8320363163948059, -0.6283511519432068, 0.007935846224427...
null
null
null
null
null
null
null
null
null
null
null
null
null
Jojolands/aryrao_data
Jojolands
2023-11-08T17:24:22Z
0
0
null
[ "license:openrail", "region:us" ]
2023-11-08T17:24:22Z
2023-11-08T16:49:14.000Z
2023-11-08T16:49:14
--- license: openrail ---
[ -0.1285335123538971, -0.1861683875322342, 0.6529128551483154, 0.49436232447624207, -0.19319400191307068, 0.23607441782951355, 0.36072009801864624, 0.05056373029947281, 0.5793656706809998, 0.7400146722793579, -0.650810182094574, -0.23784008622169495, -0.7102247476577759, -0.0478255338966846...
null
null
null
null
null
null
null
null
null
null
null
null
null
determined-ai/xsum_short
determined-ai
2023-11-08T16:52:46Z
0
0
null
[ "region:us" ]
2023-11-08T16:52:46Z
2023-11-08T16:52:44.000Z
2023-11-08T16:52:44
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: document dtype: string - name: summary dtype: string - name: id dtype: string splits: - name: train num_bytes: 1007520.5456002352 num_examples: 429 download_size: 51135 dataset_size: 1007520.5456002352 --- # Dataset Card for "xsum_short" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
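A short, hedged loading sketch for the schema above (document/summary/id columns in a single "train" split of 429 rows), assuming the default config:

```python
from datasets import load_dataset

# Assumes the default config with the "train" split described in the card.
ds = load_dataset("determined-ai/xsum_short", split="train")
print(ds.column_names)   # ['document', 'summary', 'id']
print(ds[0]["summary"])  # one reference summary for its source document
```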
[ -0.4929354786872864, -0.035909853875637054, 0.38539567589759827, 0.015671269968152046, -0.4475768804550171, 0.07569298148155212, 0.13694638013839722, -0.13786956667900085, 1.2765252590179443, 0.4619646370410919, -0.7279803156852722, -0.7681804895401001, -0.6771525144577026, -0.259171575307...
null
null
null
null
null
null
null
null
null
null
null
null
null
manu/swiss_legislation
manu
2023-11-08T16:57:39Z
0
0
null
[ "region:us" ]
2023-11-08T16:57:39Z
2023-11-08T16:57:12.000Z
2023-11-08T16:57:12
--- dataset_info: features: - name: text dtype: string - name: id dtype: string splits: - name: train num_bytes: 276089490 num_examples: 11197 download_size: 114594480 dataset_size: 276089490 --- # Dataset Card for "swiss_legislation" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.44086650013923645, -0.32559385895729065, 0.31483909487724304, 0.1779288649559021, -0.426035612821579, -0.21854975819587708, 0.30913156270980835, -0.11853103339672089, 0.6809385418891907, 0.8486723303794861, -0.834759533405304, -0.915065348148346, -0.5142108798027039, -0.3876312673091888...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_PeanutJar__Mistral-v0.1-PeanutButter-v0.0.2-7B_public
open-llm-leaderboard
2023-11-08T16:59:57Z
0
0
null
[ "region:us" ]
2023-11-08T16:59:57Z
2023-11-08T16:58:50.000Z
2023-11-08T16:58:50
--- pretty_name: Evaluation run of PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B](https://huggingface.co/PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configurations, each one corresponding to one of\ \ the evaluated tasks.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run. The \"train\" split always points to the latest results.\n\ \nAn additional configuration \"results\" stores all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_PeanutJar__Mistral-v0.1-PeanutButter-v0.0.2-7B_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T16:55:51.659477](https://huggingface.co/datasets/open-llm-leaderboard/details_PeanutJar__Mistral-v0.1-PeanutButter-v0.0.2-7B_public/blob/main/results_2023-11-08T16-55-51.659477.json)\ \ (note that there might be results for other tasks in the repo if successive evals didn't\ \ cover the same tasks. You can find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n    \"all\": {\n        \"acc\": 0.6373187668436219,\n\ \        \"acc_stderr\": 0.032198754972879835,\n        \"acc_norm\": 0.6466738397796956,\n\ \        \"acc_norm_stderr\": 0.03289417683130313,\n        \"mc1\": 0.3023255813953488,\n\ \        \"mc1_stderr\": 0.016077509266133026,\n        \"mc2\": 0.45917059894485873,\n\ \        \"mc2_stderr\": 0.014568844776078935,\n        \"em\": 0.0053481543624161075,\n\ \        \"em_stderr\": 0.0007469252903319645,\n        \"f1\": 0.06526845637583892,\n\ \        \"f1_stderr\": 0.0014953213199520477\n    },\n    \"harness|arc:challenge|25\"\ : {\n        \"acc\": 0.5844709897610921,\n        \"acc_stderr\": 0.014401366641216388,\n\ \        \"acc_norm\": 0.6177474402730375,\n        \"acc_norm_stderr\": 0.014200454049979277\n\ \    },\n    \"harness|hellaswag|10\": {\n        \"acc\": 0.6389165504879506,\n\ \        \"acc_stderr\": 0.004793330525656208,\n        \"acc_norm\": 0.8410675164309899,\n\ \        \"acc_norm_stderr\": 0.003648659041493648\n    },\n    \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n        \"acc\": 0.34,\n        \"acc_stderr\": 0.04760952285695236,\n \ \       \"acc_norm\": 0.34,\n        \"acc_norm_stderr\": 0.04760952285695236\n \ \   },\n    \"harness|hendrycksTest-anatomy|5\": {\n        \"acc\": 0.6296296296296297,\n\ \        \"acc_stderr\": 0.041716541613545426,\n        \"acc_norm\": 0.6296296296296297,\n\ \        \"acc_norm_stderr\": 0.041716541613545426\n    },\n    \"harness|hendrycksTest-astronomy|5\"\ : {\n        \"acc\": 0.6710526315789473,\n        \"acc_stderr\": 0.03823428969926604,\n\ \        \"acc_norm\": 0.6710526315789473,\n        \"acc_norm_stderr\": 0.03823428969926604\n\ \    },\n    \"harness|hendrycksTest-business_ethics|5\": {\n        \"acc\": 0.6,\n\ \        \"acc_stderr\": 0.04923659639173309,\n        \"acc_norm\": 0.6,\n \ \       \"acc_norm_stderr\": 0.04923659639173309\n    },\n    \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n        \"acc\": 0.6943396226415094,\n        \"acc_stderr\": 0.028353298073322666,\n\ \        \"acc_norm\": 0.6943396226415094,\n        \"acc_norm_stderr\": 
0.028353298073322666\n\ \ },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n\ \ \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n\ \ \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \ \ \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n \ \ },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\"\ : 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n\ \ \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n\ \ \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n\ \ \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.04940635630605659,\n\ \ \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.04940635630605659\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n\ \ \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n\ \ \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n\ \ \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.3835978835978836,\n \"acc_stderr\": 0.025043757318520196,\n \"\ acc_norm\": 0.3835978835978836,\n \"acc_norm_stderr\": 0.025043757318520196\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n\ \ \"acc_stderr\": 0.042857142857142816,\n \"acc_norm\": 0.35714285714285715,\n\ \ \"acc_norm_stderr\": 0.042857142857142816\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \ \ \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7580645161290323,\n\ \ \"acc_stderr\": 0.024362599693031096,\n \"acc_norm\": 0.7580645161290323,\n\ \ \"acc_norm_stderr\": 0.024362599693031096\n },\n \"harness|hendrycksTest-high_school_chemistry|5\"\ : {\n \"acc\": 0.5369458128078818,\n \"acc_stderr\": 0.035083705204426656,\n\ \ \"acc_norm\": 0.5369458128078818,\n \"acc_norm_stderr\": 0.035083705204426656\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\"\ : 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n\ \ \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ 
: 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"\ acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.023814477086593542,\n\ \ \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.023814477086593542\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.6461538461538462,\n \"acc_stderr\": 0.024243783994062157,\n\ \ \"acc_norm\": 0.6461538461538462,\n \"acc_norm_stderr\": 0.024243783994062157\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083015,\n \ \ \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083015\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.03135709599613591,\n \ \ \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.03135709599613591\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"\ acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.8220183486238533,\n \"acc_stderr\": 0.016399436366612927,\n \"\ acc_norm\": 0.8220183486238533,\n \"acc_norm_stderr\": 0.016399436366612927\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.5509259259259259,\n \"acc_stderr\": 0.033922384053216174,\n \"\ acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.033922384053216174\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7892156862745098,\n \"acc_stderr\": 0.028626547912437406,\n \"\ acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.028626547912437406\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n \ \ \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n\ \ \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n\ \ \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159463,\n\ \ \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159463\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"\ acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n\ \ },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n\ \ \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n\ \ \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.03226219377286775,\n\ \ \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.03226219377286775\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5,\n\ \ \"acc_stderr\": 0.04745789978762494,\n \"acc_norm\": 0.5,\n \ \ \"acc_norm_stderr\": 0.04745789978762494\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.036756688322331886,\n\ \ \"acc_norm\": 0.8349514563106796,\n 
\"acc_norm_stderr\": 0.036756688322331886\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n\ \ \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n\ \ \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \ \ \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n\ \ \"acc_stderr\": 0.013964393769899126,\n \"acc_norm\": 0.8122605363984674,\n\ \ \"acc_norm_stderr\": 0.013964393769899126\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.024257901705323378,\n\ \ \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.024257901705323378\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3329608938547486,\n\ \ \"acc_stderr\": 0.015761716178397563,\n \"acc_norm\": 0.3329608938547486,\n\ \ \"acc_norm_stderr\": 0.015761716178397563\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.024630048979824775,\n\ \ \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.024630048979824775\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n\ \ \"acc_stderr\": 0.025670259242188943,\n \"acc_norm\": 0.7138263665594855,\n\ \ \"acc_norm_stderr\": 0.025670259242188943\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.75,\n \"acc_stderr\": 0.02409347123262133,\n \ \ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.02409347123262133\n \ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\"\ : 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"\ acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4556714471968709,\n\ \ \"acc_stderr\": 0.012719949543032207,\n \"acc_norm\": 0.4556714471968709,\n\ \ \"acc_norm_stderr\": 0.012719949543032207\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n\ \ \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.6830065359477124,\n \"acc_stderr\": 0.018824219512706214,\n \ \ \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.018824219512706214\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n\ \ \"acc_stderr\": 0.04607582090719976,\n \"acc_norm\": 0.6363636363636364,\n\ \ \"acc_norm_stderr\": 0.04607582090719976\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n\ \ \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n\ \ \"acc_stderr\": 0.026508590656233264,\n \"acc_norm\": 0.8308457711442786,\n\ \ \"acc_norm_stderr\": 0.026508590656233264\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.88,\n \"acc_stderr\": 0.03265986323710906,\n \ \ \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.03265986323710906\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n\ \ \"acc_stderr\": 0.038695433234721015,\n \"acc_norm\": 0.5542168674698795,\n\ \ \"acc_norm_stderr\": 0.038695433234721015\n },\n 
\"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640038,\n\ \ \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640038\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3023255813953488,\n\ \ \"mc1_stderr\": 0.016077509266133026,\n \"mc2\": 0.45917059894485873,\n\ \ \"mc2_stderr\": 0.014568844776078935\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7837411207576953,\n \"acc_stderr\": 0.01157061486140935\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.0053481543624161075,\n \ \ \"em_stderr\": 0.0007469252903319645,\n \"f1\": 0.06526845637583892,\n\ \ \"f1_stderr\": 0.0014953213199520477\n },\n \"harness|gsm8k|5\":\ \ {\n \"acc\": 0.17437452615617893,\n \"acc_stderr\": 0.010451421361976233\n\ \ }\n}\n```" repo_url: https://huggingface.co/PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|arc:challenge|25_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-08T16-55-51.659477.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|drop|3_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T16-55-51.659477.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|gsm8k|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hellaswag|10_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T16-55-51.659477.parquet' 
- '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T16-55-51.659477.parquet' - 
'**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T16-55-51.659477.parquet' - 
'**/details_harness|hendrycksTest-international_law|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T16-55-51.659477.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - 
'**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - 
'**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - 
'**/details_harness|hendrycksTest-professional_law|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T16-55-51.659477.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T16-55-51.659477.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T16_55_51.659477 path: - '**/details_harness|winogrande|5_2023-11-08T16-55-51.659477.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T16-55-51.659477.parquet' - config_name: results data_files: - split: 2023_11_08T16_55_51.659477 path: - results_2023-11-08T16-55-51.659477.parquet - split: latest path: - results_2023-11-08T16-55-51.659477.parquet --- # Dataset Card for Evaluation run of PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B - **Paper:** - **Leaderboard:** 
https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B](https://huggingface.co/PeanutJar/Mistral-v0.1-PeanutButter-v0.0.2-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_PeanutJar__Mistral-v0.1-PeanutButter-v0.0.2-7B_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T16:55:51.659477](https://huggingface.co/datasets/open-llm-leaderboard/details_PeanutJar__Mistral-v0.1-PeanutButter-v0.0.2-7B_public/blob/main/results_2023-11-08T16-55-51.659477.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6373187668436219, "acc_stderr": 0.032198754972879835, "acc_norm": 0.6466738397796956, "acc_norm_stderr": 0.03289417683130313, "mc1": 0.3023255813953488, "mc1_stderr": 0.016077509266133026, "mc2": 0.45917059894485873, "mc2_stderr": 0.014568844776078935, "em": 0.0053481543624161075, "em_stderr": 0.0007469252903319645, "f1": 0.06526845637583892, "f1_stderr": 0.0014953213199520477 }, "harness|arc:challenge|25": { "acc": 0.5844709897610921, "acc_stderr": 0.014401366641216388, "acc_norm": 0.6177474402730375, "acc_norm_stderr": 0.014200454049979277 }, "harness|hellaswag|10": { "acc": 0.6389165504879506, "acc_stderr": 0.004793330525656208, "acc_norm": 0.8410675164309899, "acc_norm_stderr": 0.003648659041493648 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.03823428969926604, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.03823428969926604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322666, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, 
"acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.04940635630605659, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.04940635630605659 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3835978835978836, "acc_stderr": 0.025043757318520196, "acc_norm": 0.3835978835978836, "acc_norm_stderr": 0.025043757318520196 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.042857142857142816, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.042857142857142816 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7580645161290323, "acc_stderr": 0.024362599693031096, "acc_norm": 0.7580645161290323, "acc_norm_stderr": 0.024362599693031096 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5369458128078818, "acc_stderr": 0.035083705204426656, "acc_norm": 0.5369458128078818, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8756476683937824, "acc_stderr": 0.023814477086593542, "acc_norm": 0.8756476683937824, "acc_norm_stderr": 0.023814477086593542 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6461538461538462, "acc_stderr": 0.024243783994062157, "acc_norm": 0.6461538461538462, "acc_norm_stderr": 0.024243783994062157 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083015, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083015 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 
0.03135709599613591, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.03135709599613591 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8220183486238533, "acc_stderr": 0.016399436366612927, "acc_norm": 0.8220183486238533, "acc_norm_stderr": 0.016399436366612927 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.033922384053216174, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.033922384053216174 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.028626547912437406, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.028626547912437406 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080851, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159463, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159463 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.03226219377286775, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.03226219377286775 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5, "acc_stderr": 0.04745789978762494, "acc_norm": 0.5, "acc_norm_stderr": 0.04745789978762494 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.036756688322331886, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8122605363984674, "acc_stderr": 0.013964393769899126, "acc_norm": 0.8122605363984674, "acc_norm_stderr": 0.013964393769899126 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7167630057803468, "acc_stderr": 0.024257901705323378, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.024257901705323378 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3329608938547486, "acc_stderr": 0.015761716178397563, "acc_norm": 0.3329608938547486, "acc_norm_stderr": 0.015761716178397563 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.024630048979824775, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.024630048979824775 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188943, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188943 }, "harness|hendrycksTest-prehistory|5": { "acc": 
0.75, "acc_stderr": 0.02409347123262133, "acc_norm": 0.75, "acc_norm_stderr": 0.02409347123262133 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4556714471968709, "acc_stderr": 0.012719949543032207, "acc_norm": 0.4556714471968709, "acc_norm_stderr": 0.012719949543032207 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.018824219512706214, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.018824219512706214 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04607582090719976, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04607582090719976 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233264, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233264 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.03265986323710906, "acc_norm": 0.88, "acc_norm_stderr": 0.03265986323710906 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.038695433234721015, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.038695433234721015 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640038, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640038 }, "harness|truthfulqa:mc|0": { "mc1": 0.3023255813953488, "mc1_stderr": 0.016077509266133026, "mc2": 0.45917059894485873, "mc2_stderr": 0.014568844776078935 }, "harness|winogrande|5": { "acc": 0.7837411207576953, "acc_stderr": 0.01157061486140935 }, "harness|drop|3": { "em": 0.0053481543624161075, "em_stderr": 0.0007469252903319645, "f1": 0.06526845637583892, "f1_stderr": 0.0014953213199520477 }, "harness|gsm8k|5": { "acc": 0.17437452615617893, "acc_stderr": 0.010451421361976233 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7026668190956116, -0.888922393321991, 0.24415066838264465, 0.26593050360679626, -0.15350158512592316, -0.029411418363451958, -0.0203519519418478, -0.21970437467098236, 0.5533960461616516, -0.0376875177025795, -0.4514075815677643, -0.6911084055900574, -0.46484997868537903, 0.243716984987...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_psmathur__model_101_public
open-llm-leaderboard
2023-11-08T17:03:15Z
0
0
null
[ "region:us" ]
2023-11-08T17:03:15Z
2023-11-08T17:03:06.000Z
2023-11-08T17:03:06
--- pretty_name: Evaluation run of psmathur/model_101 dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [psmathur/model_101](https://huggingface.co/psmathur/model_101) on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_psmathur__model_101_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T17:02:48.057771](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__model_101_public/blob/main/results_2023-11-08T17-02-48.057771.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.42701342281879195,\n\ \ \"em_stderr\": 0.0050656203622111255,\n \"f1\": 0.5510203439597356,\n\ \ \"f1_stderr\": 0.004541060645215944,\n \"acc\": 0.6344516695758009,\n\ \ \"acc_stderr\": 0.012237970912161255\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.42701342281879195,\n \"em_stderr\": 0.0050656203622111255,\n\ \ \"f1\": 0.5510203439597356,\n \"f1_stderr\": 0.004541060645215944\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.44806671721000757,\n \ \ \"acc_stderr\": 0.013697992668274525\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.8208366219415943,\n \"acc_stderr\": 0.010777949156047986\n\ \ }\n}\n```" repo_url: https://huggingface.co/psmathur/model_101 leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_drop_3 data_files: - split: 2023_11_08T17_02_48.057771 path: - '**/details_harness|drop|3_2023-11-08T17-02-48.057771.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T17-02-48.057771.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T17_02_48.057771 path: - '**/details_harness|gsm8k|5_2023-11-08T17-02-48.057771.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T17-02-48.057771.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T17_02_48.057771 path: - '**/details_harness|winogrande|5_2023-11-08T17-02-48.057771.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T17-02-48.057771.parquet' - config_name: results data_files: - split: 2023_11_08T17_02_48.057771 path: - results_2023-11-08T17-02-48.057771.parquet - split: latest path: - results_2023-11-08T17-02-48.057771.parquet --- # Dataset Card for Evaluation run of psmathur/model_101 ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/psmathur/model_101 - **Paper:** - **Leaderboard:** 
https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [psmathur/model_101](https://huggingface.co/psmathur/model_101) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_psmathur__model_101_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T17:02:48.057771](https://huggingface.co/datasets/open-llm-leaderboard/details_psmathur__model_101_public/blob/main/results_2023-11-08T17-02-48.057771.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.42701342281879195, "em_stderr": 0.0050656203622111255, "f1": 0.5510203439597356, "f1_stderr": 0.004541060645215944, "acc": 0.6344516695758009, "acc_stderr": 0.012237970912161255 }, "harness|drop|3": { "em": 0.42701342281879195, "em_stderr": 0.0050656203622111255, "f1": 0.5510203439597356, "f1_stderr": 0.004541060645215944 }, "harness|gsm8k|5": { "acc": 0.44806671721000757, "acc_stderr": 0.013697992668274525 }, "harness|winogrande|5": { "acc": 0.8208366219415943, "acc_stderr": 0.010777949156047986 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
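Since this card lists only three task configurations (`harness_drop_3`, `harness_gsm8k_5`, `harness_winogrande_5`) plus the aggregated `results`, a short sketch for discovering and loading them programmatically may be useful; it assumes only the standard `datasets` API, nothing specific to this repo:

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_psmathur__model_101_public"

# Enumerate the available configurations before choosing one.
print(get_dataset_config_names(repo))

# Load the per-sample details of the 5-shot GSM8K evaluation.
gsm8k = load_dataset(repo, "harness_gsm8k_5", split="latest")
print(gsm8k.num_rows)
```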
[ -0.3821530342102051, -0.5994827747344971, 0.28342029452323914, 0.21210530400276184, -0.23131898045539856, 0.13554473221302032, -0.35164618492126465, -0.011075037531554699, 0.3819383382797241, 0.5277174115180969, -0.7693025469779968, -0.8933956623077393, -0.7048317790031433, 0.1941964477300...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_internlm__internlm-20b-chat_public
open-llm-leaderboard
2023-11-08T17:10:39Z
0
0
null
[ "region:us" ]
2023-11-08T17:10:39Z
2023-11-08T17:10:30.000Z
2023-11-08T17:10:30
--- pretty_name: Evaluation run of internlm/internlm-20b-chat dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [internlm/internlm-20b-chat](https://huggingface.co/internlm/internlm-20b-chat)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 3 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_internlm__internlm-20b-chat_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T17:10:14.815999](https://huggingface.co/datasets/open-llm-leaderboard/details_internlm__internlm-20b-chat_public/blob/main/results_2023-11-08T17-10-14.815999.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"em\": 0.016149328859060404,\n\ \ \"em_stderr\": 0.001290866955681033,\n \"f1\": 0.10652579697986504,\n\ \ \"f1_stderr\": 0.0024253657455140664,\n \"acc\": 0.4874752643801689,\n\ \ \"acc_stderr\": 0.011119649443380293\n },\n \"harness|drop|3\": {\n\ \ \"em\": 0.016149328859060404,\n \"em_stderr\": 0.001290866955681033,\n\ \ \"f1\": 0.10652579697986504,\n \"f1_stderr\": 0.0024253657455140664\n\ \ },\n \"harness|gsm8k|5\": {\n \"acc\": 0.18726307808946172,\n \ \ \"acc_stderr\": 0.01074591419951081\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7876874506708761,\n \"acc_stderr\": 0.011493384687249779\n\ \ }\n}\n```" repo_url: https://huggingface.co/internlm/internlm-20b-chat leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_drop_3 data_files: - split: 2023_11_08T17_10_14.815999 path: - '**/details_harness|drop|3_2023-11-08T17-10-14.815999.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T17-10-14.815999.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T17_10_14.815999 path: - '**/details_harness|gsm8k|5_2023-11-08T17-10-14.815999.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T17-10-14.815999.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T17_10_14.815999 path: - '**/details_harness|winogrande|5_2023-11-08T17-10-14.815999.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T17-10-14.815999.parquet' - config_name: results data_files: - split: 2023_11_08T17_10_14.815999 path: - results_2023-11-08T17-10-14.815999.parquet - split: latest path: - results_2023-11-08T17-10-14.815999.parquet --- # Dataset Card for Evaluation run of internlm/internlm-20b-chat ## Dataset Description - **Homepage:** - **Repository:** https://huggingface.co/internlm/internlm-20b-chat - 
**Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [internlm/internlm-20b-chat](https://huggingface.co/internlm/internlm-20b-chat) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 3 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_internlm__internlm-20b-chat_public", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2023-11-08T17:10:14.815999](https://huggingface.co/datasets/open-llm-leaderboard/details_internlm__internlm-20b-chat_public/blob/main/results_2023-11-08T17-10-14.815999.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "em": 0.016149328859060404, "em_stderr": 0.001290866955681033, "f1": 0.10652579697986504, "f1_stderr": 0.0024253657455140664, "acc": 0.4874752643801689, "acc_stderr": 0.011119649443380293 }, "harness|drop|3": { "em": 0.016149328859060404, "em_stderr": 0.001290866955681033, "f1": 0.10652579697986504, "f1_stderr": 0.0024253657455140664 }, "harness|gsm8k|5": { "acc": 0.18726307808946172, "acc_stderr": 0.01074591419951081 }, "harness|winogrande|5": { "acc": 0.7876874506708761, "acc_stderr": 0.011493384687249779 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
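Each configuration also exposes the run under its timestamped split name, with `latest` acting as an alias for the most recent run. A minimal sketch follows; the split name `2023_11_08T17_10_14.815999` is copied from the config list above, and for this single-run dataset both splits should contain the same rows:

```python
from datasets import load_dataset

repo = "open-llm-leaderboard/details_internlm__internlm-20b-chat_public"

# Load the same Winogrande details twice: once by run timestamp,
# once through the "latest" alias.
by_timestamp = load_dataset(repo, "harness_winogrande_5",
                            split="2023_11_08T17_10_14.815999")
latest = load_dataset(repo, "harness_winogrande_5", split="latest")
assert by_timestamp.num_rows == latest.num_rows
```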
[ -0.4320862591266632, -0.7375891804695129, 0.11528951674699783, 0.3716990351676941, -0.13859114050865173, 0.18996188044548035, -0.42303740978240967, -0.2342311143875122, 0.31739944219589233, 0.47528767585754395, -0.6650539636611938, -0.9868241548538208, -0.6000322103500366, 0.14542429149150...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_15
arieg
2023-11-08T17:10:52Z
0
0
null
[ "region:us" ]
2023-11-08T17:10:52Z
2023-11-08T17:10:46.000Z
2023-11-08T17:10:46
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '40842' '1': '40843' '2': '40851' '3': '40903' '4': '40908' '5': '40909' '6': '40938' '7': '40940' '8': '40985' '9': '40986' '10': '41018' '11': '41054' '12': '41095' '13': '41147' '14': '41191' '15': '41192' '16': '41568' '17': '41570' '18': '41573' '19': '41605' '20': '41709' '21': '41714' '22': '41812' '23': '41961' '24': '41962' '25': '41965' '26': '41983' '27': '42014' '28': '42016' '29': '42017' '30': '42019' '31': '42020' '32': '42023' '33': '42025' '34': '42029' '35': '42030' '36': '42031' '37': '42044' '38': '42045' '39': '42046' '40': '42048' '41': '42119' '42': '42126' '43': '42234' '44': '42235' '45': '42236' '46': '42238' '47': '42240' '48': '42241' '49': '42243' '50': '42245' '51': '42247' '52': '42310' '53': '42442' '54': '42475' '55': '42648' '56': '42659' '57': '42751' '58': '42761' '59': '42789' '60': '42966' '61': '42984' '62': '43016' '63': '43018' '64': '43019' '65': '43020' '66': '43021' '67': '43022' '68': '43023' '69': '43024' '70': '43025' '71': '43026' '72': '43027' '73': '43028' '74': '43029' '75': '43030' '76': '43063' '77': '43172' '78': '43173' '79': '43533' splits: - name: train num_bytes: 87445438.4 num_examples: 1600 download_size: 87152132 dataset_size: 87445438.4 --- # Dataset Card for "bw_spec_cls_80_15" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
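Since the card above only documents the label schema, a small usage sketch may help; it assumes the standard `datasets` image decoding, and the index `0` is illustrative rather than taken from the data:

```python
from datasets import load_dataset

# Load the single "train" split (1600 examples of image + class label).
ds = load_dataset("arieg/bw_spec_cls_80_15", split="train")

example = ds[0]
print(example["image"].size)  # decoded as a PIL image
# Map the integer label back to one of the 80 class-name strings above.
print(ds.features["label"].int2str(example["label"]))
```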
[ -0.7616842985153198, -0.23801949620246887, 0.18230310082435608, 0.38153642416000366, -0.2802785634994507, -0.12861499190330505, 0.013099635019898415, -0.3626108765602112, 0.5615656971931458, 0.466219425201416, -0.799599826335907, -0.8302854895591736, -0.5333609580993652, -0.082856848835945...
null
null
null
null
null
null
null
null
null
null
null
null
null
open-llm-leaderboard/details_Weyaxi__TekniumAiroboros-Nebula-7B_public
open-llm-leaderboard
2023-11-08T17:23:24Z
0
0
null
[ "region:us" ]
2023-11-08T17:23:24Z
2023-11-08T17:22:18.000Z
2023-11-08T17:22:18
--- pretty_name: Evaluation run of Weyaxi/TekniumAiroboros-Nebula-7B dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [Weyaxi/TekniumAiroboros-Nebula-7B](https://huggingface.co/Weyaxi/TekniumAiroboros-Nebula-7B)\ \ on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\ \nThe dataset is composed of 64 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run (and is used to compute and display the aggregated metrics on the [Open LLM\ \ Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\ \nTo load the details from a run, you can for instance do the following:\n```python\n\ from datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Weyaxi__TekniumAiroboros-Nebula-7B_public\"\ ,\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\n\ These are the [latest results from run 2023-11-08T17:19:18.874101](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__TekniumAiroboros-Nebula-7B_public/blob/main/results_2023-11-08T17-19-18.874101.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5486392579762343,\n\ \ \"acc_stderr\": 0.03374349666110138,\n \"acc_norm\": 0.5573176954338681,\n\ \ \"acc_norm_stderr\": 0.03452133425560909,\n \"mc1\": 0.34761321909424725,\n\ \ \"mc1_stderr\": 0.016670769188897303,\n \"mc2\": 0.5163563481502564,\n\ \ \"mc2_stderr\": 0.015552871980185646,\n \"em\": 0.34721057046979864,\n\ \ \"em_stderr\": 0.004875539476141144,\n \"f1\": 0.4132676174496653,\n\ \ \"f1_stderr\": 0.004730745670924258\n },\n \"harness|arc:challenge|25\"\ : {\n \"acc\": 0.5264505119453925,\n \"acc_stderr\": 0.01459093135812017,\n\ \ \"acc_norm\": 0.5716723549488054,\n \"acc_norm_stderr\": 0.014460496367599019\n\ \ },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6211909978092014,\n\ \ \"acc_stderr\": 0.004840990593494692,\n \"acc_norm\": 0.817167894841665,\n\ \ \"acc_norm_stderr\": 0.0038573886135331004\n },\n \"harness|hendrycksTest-abstract_algebra|5\"\ : {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \ \ \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n \ \ },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n\ \ \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.5259259259259259,\n\ \ \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\"\ : {\n \"acc\": 0.5131578947368421,\n \"acc_stderr\": 0.04067533136309174,\n\ \ \"acc_norm\": 0.5131578947368421,\n \"acc_norm_stderr\": 0.04067533136309174\n\ \ },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.53,\n\ \ \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \ \ \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-clinical_knowledge|5\"\ : {\n \"acc\": 0.6075471698113207,\n \"acc_stderr\": 0.030052580579557845,\n\ \ \"acc_norm\": 0.6075471698113207,\n \"acc_norm_stderr\": 0.030052580579557845\n\ \ },\n 
\"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n\ \ \"acc_stderr\": 0.039812405437178615,\n \"acc_norm\": 0.6527777777777778,\n\ \ \"acc_norm_stderr\": 0.039812405437178615\n },\n \"harness|hendrycksTest-college_chemistry|5\"\ : {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \ \ \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n\ \ \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.34,\n\ \ \"acc_stderr\": 0.047609522856952344,\n \"acc_norm\": 0.34,\n \ \ \"acc_norm_stderr\": 0.047609522856952344\n },\n \"harness|hendrycksTest-college_mathematics|5\"\ : {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \ \ \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n \ \ },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5202312138728323,\n\ \ \"acc_stderr\": 0.03809342081273958,\n \"acc_norm\": 0.5202312138728323,\n\ \ \"acc_norm_stderr\": 0.03809342081273958\n },\n \"harness|hendrycksTest-college_physics|5\"\ : {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.04389869956808777,\n\ \ \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.04389869956808777\n\ \ },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\":\ \ 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n\ \ \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-conceptual_physics|5\"\ : {\n \"acc\": 0.5148936170212766,\n \"acc_stderr\": 0.032671518489247764,\n\ \ \"acc_norm\": 0.5148936170212766,\n \"acc_norm_stderr\": 0.032671518489247764\n\ \ },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n\ \ \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.4473684210526316,\n\ \ \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\"\ : {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.04165774775728763,\n\ \ \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.04165774775728763\n\ \ },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\"\ : 0.36772486772486773,\n \"acc_stderr\": 0.024833839825562427,\n \"\ acc_norm\": 0.36772486772486773,\n \"acc_norm_stderr\": 0.024833839825562427\n\ \ },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n\ \ \"acc_stderr\": 0.04390259265377563,\n \"acc_norm\": 0.40476190476190477,\n\ \ \"acc_norm_stderr\": 0.04390259265377563\n },\n \"harness|hendrycksTest-global_facts|5\"\ : {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \ \ \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n \ \ },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\"\ : 0.6548387096774193,\n \"acc_stderr\": 0.02704574657353433,\n \"\ acc_norm\": 0.6548387096774193,\n \"acc_norm_stderr\": 0.02704574657353433\n\ \ },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\"\ : 0.3793103448275862,\n \"acc_stderr\": 0.034139638059062345,\n \"\ acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.034139638059062345\n\ \ },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \ \ \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\"\ : 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-high_school_european_history|5\"\ : {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n\ \ \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n\ \ },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\"\ : 
0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"\ acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n\ \ },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n\ \ \"acc\": 0.7823834196891192,\n \"acc_stderr\": 0.029778663037752954,\n\ \ \"acc_norm\": 0.7823834196891192,\n \"acc_norm_stderr\": 0.029778663037752954\n\ \ },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \ \ \"acc\": 0.5333333333333333,\n \"acc_stderr\": 0.02529460802398647,\n \ \ \"acc_norm\": 0.5333333333333333,\n \"acc_norm_stderr\": 0.02529460802398647\n\ \ },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"\ acc\": 0.2814814814814815,\n \"acc_stderr\": 0.02742001935094527,\n \ \ \"acc_norm\": 0.2814814814814815,\n \"acc_norm_stderr\": 0.02742001935094527\n\ \ },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \ \ \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.032252942323996406,\n\ \ \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.032252942323996406\n\ \ },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\"\ : 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"\ acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n\ \ },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\"\ : 0.7688073394495413,\n \"acc_stderr\": 0.018075750241633146,\n \"\ acc_norm\": 0.7688073394495413,\n \"acc_norm_stderr\": 0.018075750241633146\n\ \ },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\"\ : 0.33796296296296297,\n \"acc_stderr\": 0.03225941352631295,\n \"\ acc_norm\": 0.33796296296296297,\n \"acc_norm_stderr\": 0.03225941352631295\n\ \ },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\"\ : 0.7401960784313726,\n \"acc_stderr\": 0.030778554678693257,\n \"\ acc_norm\": 0.7401960784313726,\n \"acc_norm_stderr\": 0.030778554678693257\n\ \ },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"\ acc\": 0.7088607594936709,\n \"acc_stderr\": 0.02957160106575337,\n \ \ \"acc_norm\": 0.7088607594936709,\n \"acc_norm_stderr\": 0.02957160106575337\n\ \ },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6367713004484304,\n\ \ \"acc_stderr\": 0.032277904428505,\n \"acc_norm\": 0.6367713004484304,\n\ \ \"acc_norm_stderr\": 0.032277904428505\n },\n \"harness|hendrycksTest-human_sexuality|5\"\ : {\n \"acc\": 0.6259541984732825,\n \"acc_stderr\": 0.042438692422305246,\n\ \ \"acc_norm\": 0.6259541984732825,\n \"acc_norm_stderr\": 0.042438692422305246\n\ \ },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\":\ \ 0.71900826446281,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\"\ : 0.71900826446281,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n\ \ \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6759259259259259,\n\ \ \"acc_stderr\": 0.045245960070300476,\n \"acc_norm\": 0.6759259259259259,\n\ \ \"acc_norm_stderr\": 0.045245960070300476\n },\n \"harness|hendrycksTest-logical_fallacies|5\"\ : {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n\ \ \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n\ \ },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n\ \ \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \ \ \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\"\ : {\n \"acc\": 0.7087378640776699,\n \"acc_stderr\": 0.044986763205729224,\n\ \ \"acc_norm\": 0.7087378640776699,\n 
\"acc_norm_stderr\": 0.044986763205729224\n\ \ },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n\ \ \"acc_stderr\": 0.022801382534597524,\n \"acc_norm\": 0.8589743589743589,\n\ \ \"acc_norm_stderr\": 0.022801382534597524\n },\n \"harness|hendrycksTest-medical_genetics|5\"\ : {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709390974,\n \ \ \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709390974\n \ \ },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.756066411238825,\n\ \ \"acc_stderr\": 0.015357212665829465,\n \"acc_norm\": 0.756066411238825,\n\ \ \"acc_norm_stderr\": 0.015357212665829465\n },\n \"harness|hendrycksTest-moral_disputes|5\"\ : {\n \"acc\": 0.5867052023121387,\n \"acc_stderr\": 0.026511261369409247,\n\ \ \"acc_norm\": 0.5867052023121387,\n \"acc_norm_stderr\": 0.026511261369409247\n\ \ },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3340782122905028,\n\ \ \"acc_stderr\": 0.01577491142238163,\n \"acc_norm\": 0.3340782122905028,\n\ \ \"acc_norm_stderr\": 0.01577491142238163\n },\n \"harness|hendrycksTest-nutrition|5\"\ : {\n \"acc\": 0.6045751633986928,\n \"acc_stderr\": 0.027996723180631435,\n\ \ \"acc_norm\": 0.6045751633986928,\n \"acc_norm_stderr\": 0.027996723180631435\n\ \ },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6237942122186495,\n\ \ \"acc_stderr\": 0.027513925683549434,\n \"acc_norm\": 0.6237942122186495,\n\ \ \"acc_norm_stderr\": 0.027513925683549434\n },\n \"harness|hendrycksTest-prehistory|5\"\ : {\n \"acc\": 0.6388888888888888,\n \"acc_stderr\": 0.026725868809100797,\n\ \ \"acc_norm\": 0.6388888888888888,\n \"acc_norm_stderr\": 0.026725868809100797\n\ \ },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"\ acc\": 0.39361702127659576,\n \"acc_stderr\": 0.029144544781596154,\n \ \ \"acc_norm\": 0.39361702127659576,\n \"acc_norm_stderr\": 0.029144544781596154\n\ \ },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43415906127770537,\n\ \ \"acc_stderr\": 0.01265903323706725,\n \"acc_norm\": 0.43415906127770537,\n\ \ \"acc_norm_stderr\": 0.01265903323706725\n },\n \"harness|hendrycksTest-professional_medicine|5\"\ : {\n \"acc\": 0.49264705882352944,\n \"acc_stderr\": 0.030369552523902173,\n\ \ \"acc_norm\": 0.49264705882352944,\n \"acc_norm_stderr\": 0.030369552523902173\n\ \ },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"\ acc\": 0.565359477124183,\n \"acc_stderr\": 0.02005426920072646,\n \ \ \"acc_norm\": 0.565359477124183,\n \"acc_norm_stderr\": 0.02005426920072646\n\ \ },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6272727272727273,\n\ \ \"acc_stderr\": 0.04631381319425465,\n \"acc_norm\": 0.6272727272727273,\n\ \ \"acc_norm_stderr\": 0.04631381319425465\n },\n \"harness|hendrycksTest-security_studies|5\"\ : {\n \"acc\": 0.5183673469387755,\n \"acc_stderr\": 0.03198761546763127,\n\ \ \"acc_norm\": 0.5183673469387755,\n \"acc_norm_stderr\": 0.03198761546763127\n\ \ },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7661691542288557,\n\ \ \"acc_stderr\": 0.029929415408348384,\n \"acc_norm\": 0.7661691542288557,\n\ \ \"acc_norm_stderr\": 0.029929415408348384\n },\n \"harness|hendrycksTest-us_foreign_policy|5\"\ : {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \ \ \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n \ \ },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4578313253012048,\n\ \ \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.4578313253012048,\n\ \ 
\"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\"\ : {\n \"acc\": 0.7485380116959064,\n \"acc_stderr\": 0.033275044238468436,\n\ \ \"acc_norm\": 0.7485380116959064,\n \"acc_norm_stderr\": 0.033275044238468436\n\ \ },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.34761321909424725,\n\ \ \"mc1_stderr\": 0.016670769188897303,\n \"mc2\": 0.5163563481502564,\n\ \ \"mc2_stderr\": 0.015552871980185646\n },\n \"harness|winogrande|5\"\ : {\n \"acc\": 0.7324388318863457,\n \"acc_stderr\": 0.012441718456893009\n\ \ },\n \"harness|drop|3\": {\n \"em\": 0.34721057046979864,\n \ \ \"em_stderr\": 0.004875539476141144,\n \"f1\": 0.4132676174496653,\n\ \ \"f1_stderr\": 0.004730745670924258\n },\n \"harness|gsm8k|5\": {\n\ \ \"acc\": 0.09401061410159212,\n \"acc_stderr\": 0.008038819818872476\n\ \ }\n}\n```" repo_url: https://huggingface.co/Weyaxi/TekniumAiroboros-Nebula-7B leaderboard_url: https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard point_of_contact: clementine@hf.co configs: - config_name: harness_arc_challenge_25 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|arc:challenge|25_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|arc:challenge|25_2023-11-08T17-19-18.874101.parquet' - config_name: harness_drop_3 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|drop|3_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|drop|3_2023-11-08T17-19-18.874101.parquet' - config_name: harness_gsm8k_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|gsm8k|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|gsm8k|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hellaswag_10 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hellaswag|10_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hellaswag|10_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T17-19-18.874101.parquet' - 
'**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T17-19-18.874101.parquet' - 
'**/details_harness|hendrycksTest-sociology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T17-19-18.874101.parquet' - 
'**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-management|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-virology|5_2023-11-08T17-19-18.874101.parquet' - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_abstract_algebra_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-abstract_algebra|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_anatomy_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-anatomy|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_astronomy_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-astronomy|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_business_ethics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-business_ethics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_clinical_knowledge_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - 
'**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-clinical_knowledge|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_biology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_biology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_chemistry_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_chemistry|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_computer_science_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_computer_science|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_mathematics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_mathematics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_medicine_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_medicine|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_college_physics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-college_physics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_computer_security_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-computer_security|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_conceptual_physics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-conceptual_physics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_econometrics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-econometrics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_electrical_engineering_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-electrical_engineering|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_elementary_mathematics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - 
'**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-elementary_mathematics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_formal_logic_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-formal_logic|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_global_facts_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-global_facts|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_biology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_biology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_chemistry_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_chemistry|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_computer_science_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_computer_science|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_european_history_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_european_history|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_geography_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_geography|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_government_and_politics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_government_and_politics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_macroeconomics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_macroeconomics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_mathematics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_mathematics|5_2023-11-08T17-19-18.874101.parquet' - 
config_name: harness_hendrycksTest_high_school_microeconomics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_microeconomics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_physics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_physics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_psychology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_psychology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_statistics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_statistics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_us_history_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_us_history|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_high_school_world_history_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-high_school_world_history|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_human_aging_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_aging|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_human_sexuality_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-human_sexuality|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_international_law_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-international_law|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_jurisprudence_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-jurisprudence|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_logical_fallacies_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-logical_fallacies|5_2023-11-08T17-19-18.874101.parquet' - 
config_name: harness_hendrycksTest_machine_learning_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-machine_learning|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_management_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-management|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_marketing_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-marketing|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_medical_genetics_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-medical_genetics|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_miscellaneous_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-miscellaneous|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_moral_disputes_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_disputes|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_moral_scenarios_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-moral_scenarios|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_nutrition_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-nutrition|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_philosophy_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-philosophy|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_prehistory_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-prehistory|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_professional_accounting_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_accounting|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_professional_law_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - 
'**/details_harness|hendrycksTest-professional_law|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_law|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_professional_medicine_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_medicine|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_professional_psychology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-professional_psychology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_public_relations_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-public_relations|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_security_studies_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-security_studies|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_sociology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-sociology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_us_foreign_policy_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-us_foreign_policy|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_virology_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-virology|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_hendrycksTest_world_religions_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|hendrycksTest-world_religions|5_2023-11-08T17-19-18.874101.parquet' - config_name: harness_truthfulqa_mc_0 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|truthfulqa:mc|0_2023-11-08T17-19-18.874101.parquet' - config_name: harness_winogrande_5 data_files: - split: 2023_11_08T17_19_18.874101 path: - '**/details_harness|winogrande|5_2023-11-08T17-19-18.874101.parquet' - split: latest path: - '**/details_harness|winogrande|5_2023-11-08T17-19-18.874101.parquet' - config_name: results data_files: - split: 2023_11_08T17_19_18.874101 path: - results_2023-11-08T17-19-18.874101.parquet - split: latest path: - results_2023-11-08T17-19-18.874101.parquet --- # Dataset Card for Evaluation run of Weyaxi/TekniumAiroboros-Nebula-7B ## Dataset Description - **Homepage:** - **Repository:** 
https://huggingface.co/Weyaxi/TekniumAiroboros-Nebula-7B - **Paper:** - **Leaderboard:** https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard - **Point of Contact:** clementine@hf.co ### Dataset Summary Dataset automatically created during the evaluation run of model [Weyaxi/TekniumAiroboros-Nebula-7B](https://huggingface.co/Weyaxi/TekniumAiroboros-Nebula-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 64 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following (a sketch for loading the aggregated "results" config follows at the end of this card): ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Weyaxi__TekniumAiroboros-Nebula-7B_public", "harness_winogrande_5", split="latest") ``` ## Latest results These are the [latest results from run 2023-11-08T17:19:18.874101](https://huggingface.co/datasets/open-llm-leaderboard/details_Weyaxi__TekniumAiroboros-Nebula-7B_public/blob/main/results_2023-11-08T17-19-18.874101.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval): ```json { "all": { "acc": 0.5486392579762343, "acc_stderr": 0.03374349666110138, "acc_norm": 0.5573176954338681, "acc_norm_stderr": 0.03452133425560909, "mc1": 0.34761321909424725, "mc1_stderr": 0.016670769188897303, "mc2": 0.5163563481502564, "mc2_stderr": 0.015552871980185646, "em": 0.34721057046979864, "em_stderr": 0.004875539476141144, "f1": 0.4132676174496653, "f1_stderr": 0.004730745670924258 }, "harness|arc:challenge|25": { "acc": 0.5264505119453925, "acc_stderr": 0.01459093135812017, "acc_norm": 0.5716723549488054, "acc_norm_stderr": 0.014460496367599019 }, "harness|hellaswag|10": { "acc": 0.6211909978092014, "acc_stderr": 0.004840990593494692, "acc_norm": 0.817167894841665, "acc_norm_stderr": 0.0038573886135331004 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750575, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5131578947368421, "acc_stderr": 0.04067533136309174, "acc_norm": 0.5131578947368421, "acc_norm_stderr": 0.04067533136309174 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6075471698113207, "acc_stderr": 0.030052580579557845, "acc_norm": 0.6075471698113207, "acc_norm_stderr": 0.030052580579557845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6527777777777778, "acc_stderr": 0.039812405437178615, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.039812405437178615 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309,
"acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.34, "acc_stderr": 0.047609522856952344, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952344 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5202312138728323, "acc_stderr": 0.03809342081273958, "acc_norm": 0.5202312138728323, "acc_norm_stderr": 0.03809342081273958 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.04389869956808777, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.04389869956808777 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5148936170212766, "acc_stderr": 0.032671518489247764, "acc_norm": 0.5148936170212766, "acc_norm_stderr": 0.032671518489247764 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.04677473004491199, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.36772486772486773, "acc_stderr": 0.024833839825562427, "acc_norm": 0.36772486772486773, "acc_norm_stderr": 0.024833839825562427 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377563, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377563 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6548387096774193, "acc_stderr": 0.02704574657353433, "acc_norm": 0.6548387096774193, "acc_norm_stderr": 0.02704574657353433 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.03501438706296781, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7823834196891192, "acc_stderr": 0.029778663037752954, "acc_norm": 0.7823834196891192, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5333333333333333, "acc_stderr": 0.02529460802398647, "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.02529460802398647 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2814814814814815, "acc_stderr": 0.02742001935094527, "acc_norm": 0.2814814814814815, "acc_norm_stderr": 0.02742001935094527 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 
0.5588235294117647, "acc_stderr": 0.032252942323996406, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.032252942323996406 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7688073394495413, "acc_stderr": 0.018075750241633146, "acc_norm": 0.7688073394495413, "acc_norm_stderr": 0.018075750241633146 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.33796296296296297, "acc_stderr": 0.03225941352631295, "acc_norm": 0.33796296296296297, "acc_norm_stderr": 0.03225941352631295 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7401960784313726, "acc_stderr": 0.030778554678693257, "acc_norm": 0.7401960784313726, "acc_norm_stderr": 0.030778554678693257 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7088607594936709, "acc_stderr": 0.02957160106575337, "acc_norm": 0.7088607594936709, "acc_norm_stderr": 0.02957160106575337 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6367713004484304, "acc_stderr": 0.032277904428505, "acc_norm": 0.6367713004484304, "acc_norm_stderr": 0.032277904428505 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6259541984732825, "acc_stderr": 0.042438692422305246, "acc_norm": 0.6259541984732825, "acc_norm_stderr": 0.042438692422305246 }, "harness|hendrycksTest-international_law|5": { "acc": 0.71900826446281, "acc_stderr": 0.04103203830514512, "acc_norm": 0.71900826446281, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6759259259259259, "acc_stderr": 0.045245960070300476, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.045245960070300476 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7087378640776699, "acc_stderr": 0.044986763205729224, "acc_norm": 0.7087378640776699, "acc_norm_stderr": 0.044986763205729224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597524, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597524 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.63, "acc_stderr": 0.048523658709390974, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709390974 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.756066411238825, "acc_stderr": 0.015357212665829465, "acc_norm": 0.756066411238825, "acc_norm_stderr": 0.015357212665829465 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5867052023121387, "acc_stderr": 0.026511261369409247, "acc_norm": 0.5867052023121387, "acc_norm_stderr": 0.026511261369409247 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3340782122905028, "acc_stderr": 0.01577491142238163, "acc_norm": 0.3340782122905028, "acc_norm_stderr": 0.01577491142238163 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6045751633986928, "acc_stderr": 0.027996723180631435, "acc_norm": 0.6045751633986928, "acc_norm_stderr": 0.027996723180631435 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6237942122186495, "acc_stderr": 0.027513925683549434, "acc_norm": 0.6237942122186495, "acc_norm_stderr": 0.027513925683549434 }, 
"harness|hendrycksTest-prehistory|5": { "acc": 0.6388888888888888, "acc_stderr": 0.026725868809100797, "acc_norm": 0.6388888888888888, "acc_norm_stderr": 0.026725868809100797 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.39361702127659576, "acc_stderr": 0.029144544781596154, "acc_norm": 0.39361702127659576, "acc_norm_stderr": 0.029144544781596154 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43415906127770537, "acc_stderr": 0.01265903323706725, "acc_norm": 0.43415906127770537, "acc_norm_stderr": 0.01265903323706725 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.49264705882352944, "acc_stderr": 0.030369552523902173, "acc_norm": 0.49264705882352944, "acc_norm_stderr": 0.030369552523902173 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.565359477124183, "acc_stderr": 0.02005426920072646, "acc_norm": 0.565359477124183, "acc_norm_stderr": 0.02005426920072646 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6272727272727273, "acc_stderr": 0.04631381319425465, "acc_norm": 0.6272727272727273, "acc_norm_stderr": 0.04631381319425465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5183673469387755, "acc_stderr": 0.03198761546763127, "acc_norm": 0.5183673469387755, "acc_norm_stderr": 0.03198761546763127 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7661691542288557, "acc_stderr": 0.029929415408348384, "acc_norm": 0.7661691542288557, "acc_norm_stderr": 0.029929415408348384 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.4578313253012048, "acc_stderr": 0.0387862677100236, "acc_norm": 0.4578313253012048, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7485380116959064, "acc_stderr": 0.033275044238468436, "acc_norm": 0.7485380116959064, "acc_norm_stderr": 0.033275044238468436 }, "harness|truthfulqa:mc|0": { "mc1": 0.34761321909424725, "mc1_stderr": 0.016670769188897303, "mc2": 0.5163563481502564, "mc2_stderr": 0.015552871980185646 }, "harness|winogrande|5": { "acc": 0.7324388318863457, "acc_stderr": 0.012441718456893009 }, "harness|drop|3": { "em": 0.34721057046979864, "em_stderr": 0.004875539476141144, "f1": 0.4132676174496653, "f1_stderr": 0.004730745670924258 }, "harness|gsm8k|5": { "acc": 0.09401061410159212, "acc_stderr": 0.008038819818872476 } } ``` ### Supported Tasks and Leaderboards [More Information Needed] ### Languages [More Information Needed] ## Dataset Structure ### Data Instances [More Information Needed] ### Data Fields [More Information Needed] ### Data Splits [More Information Needed] ## Dataset Creation ### Curation Rationale [More Information Needed] ### Source Data #### Initial Data Collection and Normalization [More Information Needed] #### Who are the source language producers? [More Information Needed] ### Annotations #### Annotation process [More Information Needed] #### Who are the annotators? [More Information Needed] ### Personal and Sensitive Information [More Information Needed] ## Considerations for Using the Data ### Social Impact of Dataset [More Information Needed] ### Discussion of Biases [More Information Needed] ### Other Known Limitations [More Information Needed] ## Additional Information ### Dataset Curators [More Information Needed] ### Licensing Information [More Information Needed] ### Citation Information [More Information Needed] ### Contributions [More Information Needed]
[ -0.7045329809188843, -0.8035106658935547, 0.2923806309700012, 0.19615185260772705, -0.21840181946754456, -0.035868577659130096, 0.03412766009569168, -0.19625315070152283, 0.5980671048164368, -0.06327338516712189, -0.4665053188800812, -0.7045983672142029, -0.4632300138473511, 0.239570841193...
null
null
null
null
null
null
null
null
null
null
null
null
null
atom-in-the-universe/bild-deduped-16
atom-in-the-universe
2023-11-26T18:10:28Z
0
0
null
[ "region:us" ]
2023-11-26T18:10:28Z
2023-11-08T17:30:58.000Z
2023-11-08T17:30:58
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
ChristophSchuhmann/DFN_378_emotion_detector
ChristophSchuhmann
2023-11-16T20:19:51Z
0
0
null
[ "region:us" ]
2023-11-16T20:19:51Z
2023-11-08T17:32:08.000Z
2023-11-08T17:32:08
Entry not found
[ -0.32276472449302673, -0.22568407654762268, 0.8622258901596069, 0.4346148371696472, -0.5282984972000122, 0.7012965679168701, 0.7915717363357544, 0.07618629932403564, 0.7746022939682007, 0.2563222646713257, -0.785281777381897, -0.22573848068714142, -0.9104482531547546, 0.5715669393539429, ...
null
null
null
null
null
null
null
null
null
null
null
null
null
arincon/llm-detect
arincon
2023-11-08T17:33:10Z
0
0
null
[ "region:us" ]
2023-11-08T17:33:10Z
2023-11-08T17:32:57.000Z
2023-11-08T17:32:57
--- configs: - config_name: default data_files: - split: '01' path: data/01-* - split: '02' path: data/02-* - split: '03' path: data/03-* - split: '04' path: data/04-* dataset_info: features: - name: text dtype: string - name: label dtype: int64 - name: source dtype: string - name: fold dtype: int64 - name: essay_id dtype: string - name: prompt dtype: string splits: - name: '01' num_bytes: 74744152 num_examples: 33259 - name: '02' num_bytes: 94518857 num_examples: 39785 - name: '03' num_bytes: 99824812 num_examples: 42206 - name: '04' num_bytes: 104524272 num_examples: 44206 download_size: 197583354 dataset_size: 373612093 --- # Dataset Card for "llm-detect" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
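A minimal sketch for loading one of the folds declared in the config above; the split names '01'–'04' and the column list come straight from the YAML, while the printed interpretation of `label` is an assumption: ```python from datasets import load_dataset # Load the first fold of the default config; splits are named "01" through "04". fold_01 = load_dataset("arincon/llm-detect", split="01") print(fold_01.column_names) # text, label, source, fold, essay_id, prompt print(fold_01[0]["label"]) # integer label (assumed: human- vs. LLM-written text) ```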
[ -0.7540055513381958, -0.2284487634897232, 0.494525283575058, 0.1176614761352539, -0.262960284948349, -0.08325592428445816, 0.2911962866783142, -0.3142068386077881, 0.8720006942749023, 0.3685583472251892, -0.9177579879760742, -0.957024097442627, -0.6969729661941528, -0.1627553403377533, -...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_16
arieg
2023-11-08T17:35:40Z
0
0
null
[ "region:us" ]
2023-11-08T17:35:40Z
2023-11-08T17:35:32.000Z
2023-11-08T17:35:32
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '43534' '1': '43535' '2': '43536' '3': '43585' '4': '43586' '5': '43587' '6': '43588' '7': '43589' '8': '43590' '9': '43592' '10': '43593' '11': '43594' '12': '43595' '13': '43596' '14': '43598' '15': '43599' '16': '43600' '17': '43608' '18': '43621' '19': '43623' '20': '43695' '21': '43696' '22': '43697' '23': '43698' '24': '43699' '25': '43761' '26': '43773' '27': '43796' '28': '43842' '29': '43843' '30': '43844' '31': '43856' '32': '43857' '33': '43858' '34': '43860' '35': '43861' '36': '43863' '37': '43865' '38': '43866' '39': '43867' '40': '43868' '41': '43869' '42': '43883' '43': '43886' '44': '43899' '45': '43911' '46': '43962' '47': '43965' '48': '44092' '49': '44110' '50': '44169' '51': '44236' '52': '44342' '53': '44347' '54': '44354' '55': '44778' '56': '44779' '57': '44780' '58': '44781' '59': '44782' '60': '44791' '61': '44792' '62': '44793' '63': '44794' '64': '44795' '65': '44796' '66': '44797' '67': '44798' '68': '44799' '69': '44801' '70': '44803' '71': '44804' '72': '44805' '73': '44806' '74': '44809' '75': '44820' '76': '44821' '77': '44822' '78': '44823' '79': '44848' splits: - name: train num_bytes: 90417910.4 num_examples: 1600 download_size: 89917143 dataset_size: 90417910.4 --- # Dataset Card for "bw_spec_cls_80_16" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
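Because `label` is a `class_label` over part-number-like strings, a short sketch of mapping integer labels back to their names (assuming the standard `datasets` ClassLabel API): ```python from datasets import load_dataset ds = load_dataset("arieg/bw_spec_cls_80_16", split="train") # ClassLabel.int2str maps an integer label back to its string name, e.g. 0 -> "43534". label_feature = ds.features["label"] print(label_feature.int2str(ds[0]["label"])) print(ds[0]["image"].size) # PIL image decoded by the image feature ```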
[ -0.714430570602417, -0.21128737926483154, 0.20160654187202454, 0.2963922619819641, -0.3268697261810303, -0.14589811861515045, -0.01531636156141758, -0.3412986099720001, 0.5664129853248596, 0.48634058237075806, -0.7786093950271606, -0.8221737146377563, -0.5493693351745605, -0.13359335064888...
null
null
null
null
null
null
null
null
null
null
null
null
null
arthurmluz/wikilingua_data-wiki_gptextsum_results
arthurmluz
2023-11-08T17:51:07Z
0
0
null
[ "region:us" ]
2023-11-08T17:51:07Z
2023-11-08T17:49:20.000Z
2023-11-08T17:49:20
--- dataset_info: features: - name: id dtype: int64 - name: text dtype: string - name: summary dtype: string - name: gen_summary dtype: string - name: rouge struct: - name: rouge1 dtype: float64 - name: rouge2 dtype: float64 - name: rougeL dtype: float64 - name: rougeLsum dtype: float64 - name: bert struct: - name: f1 sequence: float64 - name: hashcode dtype: string - name: precision sequence: float64 - name: recall sequence: float64 splits: - name: validation num_bytes: 22119069 num_examples: 8165 download_size: 13030706 dataset_size: 22119069 configs: - config_name: default data_files: - split: validation path: data/validation-* --- # Dataset Card for "wikilingua_data-wiki_gptextsum_results" rouge= {'rouge1': 0.33197745689950786, 'rouge2': 0.12351364096382854, 'rougeL': 0.24922924859888235, 'rougeLsum': 0.24922924859888235} bert= {'precision': 0.761901833262096, 'recall': 0.7622503304255206, 'f1': 0.7612595360336023}
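The rouge and bert dictionaries above match the output shapes of the Hugging Face `evaluate` metrics; a hedged sketch of how such numbers are typically produced (the library choice and the `lang="pt"` setting are assumptions, not documented in this card): ```python import evaluate # Hypothetical generated and reference summaries; real ones would come from the # gen_summary and summary columns of this dataset. predictions = ["um resumo gerado"] references = ["um resumo de referencia"] rouge = evaluate.load("rouge") print(rouge.compute(predictions=predictions, references=references)) # {'rouge1': ..., 'rouge2': ..., 'rougeL': ..., 'rougeLsum': ...} bertscore = evaluate.load("bertscore") print(bertscore.compute(predictions=predictions, references=references, lang="pt")) # {'precision': [...], 'recall': [...], 'f1': [...], 'hashcode': '...'} ```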
[ -0.30732184648513794, -0.5342002511024475, 0.10330216586589813, 0.016101961955428123, -0.3534669578075409, -0.2674279510974884, -0.48703092336654663, -0.08108607679605484, 0.7376065850257874, 0.27097758650779724, -0.37144309282302856, -0.8899716734886169, -0.7904841899871826, 0.10347218811...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_17
arieg
2023-11-08T18:00:38Z
0
0
null
[ "region:us" ]
2023-11-08T18:00:38Z
2023-11-08T18:00:31.000Z
2023-11-08T18:00:31
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '44849' '1': '44850' '2': '44851' '3': '44853' '4': '44854' '5': '44917' '6': '44918' '7': '44946' '8': '44947' '9': '44948' '10': '44949' '11': '44950' '12': '44951' '13': '44952' '14': '45099' '15': '45100' '16': '45101' '17': '45102' '18': '45103' '19': '45150' '20': '45151' '21': '45153' '22': '45154' '23': '45335' '24': '45387' '25': '45388' '26': '45389' '27': '45390' '28': '45391' '29': '45392' '30': '45393' '31': '45474' '32': '45475' '33': '45508' '34': '45513' '35': '45514' '36': '45515' '37': '45516' '38': '45517' '39': '45518' '40': '45519' '41': '45520' '42': '45521' '43': '45522' '44': '45523' '45': '45941' '46': '46024' '47': '46043' '48': '46058' '49': '46068' '50': '46078' '51': '46079' '52': '46157' '53': '46158' '54': '46159' '55': '46160' '56': '46161' '57': '46162' '58': '46238' '59': '46241' '60': '46525' '61': '46611' '62': '46711' '63': '46726' '64': '46732' '65': '46733' '66': '46736' '67': '46839' '68': '46840' '69': '46841' '70': '46842' '71': '46844' '72': '46846' '73': '46854' '74': '46855' '75': '46928' '76': '46930' '77': '47068' '78': '47069' '79': '47070' splits: - name: train num_bytes: 87575273.6 num_examples: 1600 download_size: 87221336 dataset_size: 87575273.6 --- # Dataset Card for "bw_spec_cls_80_17" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.6924037337303162, -0.20562317967414856, 0.1854948103427887, 0.28960609436035156, -0.3159531354904175, -0.14136192202568054, -0.03087305650115013, -0.31960174441337585, 0.5369694828987122, 0.4707012176513672, -0.7644780874252319, -0.8184657096862793, -0.5658202171325684, -0.1634248793125...
null
null
null
null
null
null
null
null
null
null
null
null
null
arthurmluz/temario_data-xlsum_gptextsum_results
arthurmluz
2023-11-08T18:19:54Z
0
0
null
[ "region:us" ]
2023-11-08T18:19:54Z
2023-11-08T18:13:22.000Z
2023-11-08T18:13:22
--- dataset_info: features: - name: id dtype: string - name: text dtype: string - name: summary dtype: string - name: gen_summary dtype: string - name: rouge struct: - name: rouge1 dtype: float64 - name: rouge2 dtype: float64 - name: rougeL dtype: float64 - name: rougeLsum dtype: float64 - name: bert struct: - name: f1 sequence: float64 - name: hashcode dtype: string - name: precision sequence: float64 - name: recall sequence: float64 splits: - name: validation num_bytes: 206481 num_examples: 25 download_size: 163467 dataset_size: 206481 configs: - config_name: default data_files: - split: validation path: data/validation-* --- # Dataset Card for "temario_data-xlsum_gptextsum_results" rouge= {'rouge1': 0.17896239871319466, 'rouge2': 0.06855215321331638, 'rougeL': 0.12007675671332124, 'rougeLsum': 0.12007675671332124} bert= {'precision': 0.738992075920105, 'recall': 0.6221855568885803, 'f1': 0.6748717761039734}
[ -0.1342211365699768, -0.35837557911872864, 0.3017933964729309, 0.06091829389333725, -0.4696837067604065, -0.0622904933989048, -0.30313563346862793, 0.037684209644794464, 0.7830328345298767, 0.2451784759759903, -0.22425012290477753, -1.0183980464935303, -0.9342451691627502, 0.05532105267047...
null
null
null
null
null
null
null
null
null
null
null
null
null
arieg/bw_spec_cls_80_18
arieg
2023-11-08T18:25:35Z
0
0
null
[ "region:us" ]
2023-11-08T18:25:35Z
2023-11-08T18:25:26.000Z
2023-11-08T18:25:26
--- configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: image dtype: image - name: label dtype: class_label: names: '0': '47071' '1': '47072' '2': '47073' '3': '47074' '4': '47075' '5': '47076' '6': '47077' '7': '47192' '8': '47193' '9': '47194' '10': '47195' '11': '47196' '12': '47197' '13': '47198' '14': '47199' '15': '47200' '16': '47201' '17': '47202' '18': '47260' '19': '47506' '20': '47510' '21': '47657' '22': '47658' '23': '47659' '24': '47660' '25': '47661' '26': '47662' '27': '47663' '28': '47665' '29': '47666' '30': '47670' '31': '47671' '32': '47707' '33': '47826' '34': '47835' '35': '47865' '36': '47868' '37': '47894' '38': '47895' '39': '47896' '40': '47897' '41': '47916' '42': '47921' '43': '48015' '44': '48042' '45': '48043' '46': '48044' '47': '48046' '48': '48269' '49': '48307' '50': '48367' '51': '48368' '52': '48369' '53': '48437' '54': '48439' '55': '48440' '56': '48442' '57': '48443' '58': '48444' '59': '48446' '60': '48450' '61': '48452' '62': '48453' '63': '48454' '64': '48456' '65': '48457' '66': '48462' '67': '48463' '68': '48464' '69': '48465' '70': '48466' '71': '48488' '72': '48489' '73': '48491' '74': '48492' '75': '48493' '76': '48494' '77': '48763' '78': '48808' '79': '48815' splits: - name: train num_bytes: 87516568.0 num_examples: 1600 download_size: 87788761 dataset_size: 87516568.0 --- # Dataset Card for "bw_spec_cls_80_18" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
[ -0.7144466638565063, -0.2658827602863312, 0.18288232386112213, 0.33573028445243835, -0.27709734439849854, -0.15991365909576416, -0.012977822683751583, -0.37266385555267334, 0.5546272397041321, 0.5344817042350769, -0.8118144869804382, -0.7571083307266235, -0.541278064250946, -0.121034562587...
null
null
null
null
null
null
null
null
null
null
null
null
null