dataset_info:
- config_name: chemical_biosciences
  features:
  - name: question
    dtype: string
  - name: answer
    dtype: int64
  - name: A
    dtype: string
  - name: B
    dtype: string
  - name: C
    dtype: string
  - name: D
    dtype: string
  - name: Category
    dtype: string
  - name: Human Accuracy
    dtype: float64
  splits:
  - name: dev
    num_bytes: 1376
    num_examples: 5
  - name: test
    num_bytes: 32176
    num_examples: 120
  download_size: 28620
  dataset_size: 33552
- config_name: earth_life_sciences
  features:
  - name: question
    dtype: string
  - name: answer
    dtype: int64
  - name: A
    dtype: string
  - name: B
    dtype: string
  - name: C
    dtype: string
  - name: D
    dtype: string
  - name: Category
    dtype: string
  - name: Human Accuracy
    dtype: float64
  splits:
  - name: dev
    num_bytes: 1301
    num_examples: 5
  - name: test
    num_bytes: 29098
    num_examples: 120
  download_size: 26881
  dataset_size: 30399
- config_name: liberal_arts_and_social_sciences
  features:
  - name: question
    dtype: string
  - name: answer
    dtype: int64
  - name: A
    dtype: string
  - name: B
    dtype: string
  - name: C
    dtype: string
  - name: D
    dtype: string
  - name: Category
    dtype: string
  - name: Human Accuracy
    dtype: float64
  splits:
  - name: dev
    num_bytes: 1097
    num_examples: 5
  - name: test
    num_bytes: 37448
    num_examples: 145
  download_size: 31197
  dataset_size: 38545
- config_name: medical_science
  features:
  - name: question
    dtype: string
  - name: answer
    dtype: int64
  - name: A
    dtype: string
  - name: B
    dtype: string
  - name: C
    dtype: string
  - name: D
    dtype: string
  - name: Category
    dtype: string
  - name: Human Accuracy
    dtype: float64
  splits:
  - name: dev
    num_bytes: 1133
    num_examples: 5
  - name: test
    num_bytes: 27710
    num_examples: 119
  download_size: 25455
  dataset_size: 28843
- config_name: physics_mathematics
  features:
  - name: question
    dtype: string
  - name: answer
    dtype: int64
  - name: A
    dtype: string
  - name: B
    dtype: string
  - name: C
    dtype: string
  - name: D
    dtype: string
  - name: Category
    dtype: string
  - name: Human Accuracy
    dtype: float64
  splits:
  - name: dev
    num_bytes: 1171
    num_examples: 5
  - name: test
    num_bytes: 28307
    num_examples: 113
  download_size: 26222
  dataset_size: 29478
configs:
- config_name: chemical_biosciences
  data_files:
  - split: dev
    path: chemical_biosciences/dev-*
  - split: test
    path: chemical_biosciences/test-*
- config_name: earth_life_sciences
  data_files:
  - split: dev
    path: earth_life_sciences/dev-*
  - split: test
    path: earth_life_sciences/test-*
- config_name: liberal_arts_and_social_sciences
  data_files:
  - split: dev
    path: liberal_arts_and_social_sciences/dev-*
  - split: test
    path: liberal_arts_and_social_sciences/test-*
- config_name: medical_science
  data_files:
  - split: dev
    path: medical_science/dev-*
  - split: test
    path: medical_science/test-*
- config_name: physics_mathematics
  data_files:
  - split: dev
    path: physics_mathematics/dev-*
  - split: test
    path: physics_mathematics/test-*