|
|
--- |
|
|
dataset_info: |
|
|
- config_name: art_and_design |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 3113808.0 |
|
|
num_examples: 11 |
|
|
- name: val |
|
|
num_bytes: 25493074.0 |
|
|
num_examples: 88 |
|
|
- name: test |
|
|
num_bytes: 311985416.171 |
|
|
num_examples: 1091 |
|
|
download_size: 343578142 |
|
|
dataset_size: 340592298.171 |
|
|
- config_name: business |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 554457.0 |
|
|
num_examples: 16 |
|
|
- name: val |
|
|
num_bytes: 6152883.0 |
|
|
num_examples: 126 |
|
|
- name: test |
|
|
num_bytes: 60103968.654 |
|
|
num_examples: 1538 |
|
|
download_size: 74809661 |
|
|
dataset_size: 66811308.654 |
|
|
- config_name: health_and_medicine |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 2837834.0 |
|
|
num_examples: 18 |
|
|
- name: val |
|
|
num_bytes: 23957247.0 |
|
|
num_examples: 153 |
|
|
- name: test |
|
|
num_bytes: 204211130.315 |
|
|
num_examples: 1865 |
|
|
download_size: 293138089 |
|
|
dataset_size: 231006211.315 |
|
|
- config_name: humanities_and_social_sciences |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 1406107.0 |
|
|
num_examples: 11 |
|
|
- name: val |
|
|
num_bytes: 10657772.0 |
|
|
num_examples: 85 |
|
|
- name: test |
|
|
num_bytes: 157998857.894 |
|
|
num_examples: 1038 |
|
|
download_size: 160096107 |
|
|
dataset_size: 170062736.894 |
|
|
- config_name: science |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 3750526.0 |
|
|
num_examples: 25 |
|
|
- name: val |
|
|
num_bytes: 14762058.0 |
|
|
num_examples: 204 |
|
|
- name: test |
|
|
num_bytes: 268839654.208 |
|
|
num_examples: 2494 |
|
|
download_size: 214377468 |
|
|
dataset_size: 287352238.208 |
|
|
- config_name: technology_and_engineering |
|
|
features: |
|
|
- name: id |
|
|
dtype: string |
|
|
- name: type |
|
|
dtype: string |
|
|
- name: source_type |
|
|
dtype: string |
|
|
- name: source |
|
|
dtype: string |
|
|
- name: question |
|
|
dtype: string |
|
|
- name: option1 |
|
|
dtype: string |
|
|
- name: option2 |
|
|
dtype: string |
|
|
- name: option3 |
|
|
dtype: string |
|
|
- name: option4 |
|
|
dtype: string |
|
|
- name: image_1 |
|
|
dtype: image |
|
|
- name: image_2 |
|
|
dtype: image |
|
|
- name: image_3 |
|
|
dtype: image |
|
|
- name: image_4 |
|
|
dtype: image |
|
|
- name: image_5 |
|
|
dtype: image |
|
|
- name: answer |
|
|
dtype: string |
|
|
- name: analysis |
|
|
dtype: string |
|
|
- name: distribution |
|
|
dtype: string |
|
|
- name: difficulty_level |
|
|
dtype: string |
|
|
- name: subcategory |
|
|
dtype: string |
|
|
- name: category |
|
|
dtype: string |
|
|
- name: subfield |
|
|
dtype: string |
|
|
- name: img_type |
|
|
dtype: string |
|
|
- name: image_1_filename |
|
|
dtype: string |
|
|
- name: image_2_filename |
|
|
dtype: string |
|
|
- name: image_3_filename |
|
|
dtype: string |
|
|
- name: image_4_filename |
|
|
dtype: string |
|
|
- name: image_5_filename |
|
|
dtype: string |
|
|
splits: |
|
|
- name: dev |
|
|
num_bytes: 1518231.0 |
|
|
num_examples: 31 |
|
|
- name: val |
|
|
num_bytes: 14794870.0 |
|
|
num_examples: 244 |
|
|
- name: test |
|
|
num_bytes: 134814024.93 |
|
|
num_examples: 2974 |
|
|
download_size: 213411677 |
|
|
dataset_size: 151127125.93 |
|
|
configs: |
|
|
- config_name: art_and_design |
|
|
data_files: |
|
|
- split: dev |
|
|
path: art_and_design/dev-* |
|
|
- split: val |
|
|
path: art_and_design/val-* |
|
|
- split: test |
|
|
path: art_and_design/test-* |
|
|
- config_name: business |
|
|
data_files: |
|
|
- split: dev |
|
|
path: business/dev-* |
|
|
- split: val |
|
|
path: business/val-* |
|
|
- split: test |
|
|
path: business/test-* |
|
|
- config_name: health_and_medicine |
|
|
data_files: |
|
|
- split: dev |
|
|
path: health_and_medicine/dev-* |
|
|
- split: val |
|
|
path: health_and_medicine/val-* |
|
|
- split: test |
|
|
path: health_and_medicine/test-* |
|
|
- config_name: humanities_and_social_sciences |
|
|
data_files: |
|
|
- split: dev |
|
|
path: humanities_and_social_sciences/dev-* |
|
|
- split: val |
|
|
path: humanities_and_social_sciences/val-* |
|
|
- split: test |
|
|
path: humanities_and_social_sciences/test-* |
|
|
- config_name: science |
|
|
data_files: |
|
|
- split: dev |
|
|
path: science/dev-* |
|
|
- split: val |
|
|
path: science/val-* |
|
|
- split: test |
|
|
path: science/test-* |
|
|
- config_name: technology_and_engineering |
|
|
data_files: |
|
|
- split: dev |
|
|
path: technology_and_engineering/dev-* |
|
|
- split: val |
|
|
path: technology_and_engineering/val-* |
|
|
- split: test |
|
|
path: technology_and_engineering/test-* |
|
|
--- |
|
|
|
|
|
|
|
|
# CMMMU |
|
|
|
|
|
[**🌐 Homepage**](https://cmmmu-benchmark.github.io/) | [**🤗 Paper**](https://huggingface.co/papers/2401.11944) | [**📖 arXiv**](https://arxiv.org/pdf/2401.11944.pdf) | [**🤗 Dataset**](https://huggingface.co/datasets/m-a-p/CMMMU) | [**GitHub**](https://github.com/CMMMU-Benchmark/CMMMU)
|
|
|
|
|
## Introduction |
|
|
CMMMU includes 12k manually collected multimodal questions from college exams, quizzes, and textbooks, covering six core disciplines: Art & Design, Business, Science, Health & Medicine, Humanities & Social Sciences, and Tech & Engineering, like its companion, MMMU. These questions span 30 subjects and comprise 39 highly heterogeneous image types, such as charts, diagrams, maps, tables, music sheets, and chemical structures.
|
|
|
|
|
 |
|
|
|
|
|
|
|
|
## 🏆 Mini-Leaderboard
|
|
| Model | Val (900) | Test (11K) | |
|
|
|--------------------------------|:---------:|:------------:| |
|
|
| GPT-4V(ision) (Playground) | **42.5** | **43.7** | |
|
|
| Qwen-VL-PLUS* | 39.5 | 36.8 | |
|
|
| Yi-VL-34B | 36.2 | 36.5 | |
|
|
| Yi-VL-6B | 35.8 | 35.0 | |
|
|
| InternVL-Chat-V1.1* | 34.7 | 34.0 | |
|
|
| Qwen-VL-7B-Chat | 30.7 | 31.3 | |
|
|
| SPHINX-MoE* | 29.3 | 29.5 | |
|
|
| InternVL-Chat-ViT-6B-Vicuna-7B | 26.4 | 26.7 | |
|
|
| InternVL-Chat-ViT-6B-Vicuna-13B| 27.4 | 26.1 | |
|
|
| CogAgent-Chat | 24.6 | 23.6 | |
|
|
| Emu2-Chat | 23.8 | 24.5 | |
|
|
| Chinese-LLaVA | 25.5 | 23.4 | |
|
|
| VisCPM | 25.2 | 22.7 | |
|
|
| mPLUG-OWL2 | 20.8 | 22.2 | |
|
|
| Frequent Choice | 24.1 | 26.0 | |
|
|
| Random Choice | 21.6 | 21.6 | |
|
|
|
|
|
*: results provided by the authors. |
|
|
|
|
|
## Disclaimers |
|
|
The guidelines for the annotators emphasized strict compliance with copyright and licensing rules from the initial data source, specifically avoiding materials from websites that forbid copying and redistribution. |
|
|
Should you encounter any data samples potentially breaching the copyright or licensing regulations of any site, we encourage you to [contact](#contact) us. Upon verification, such samples will be promptly removed. |
|
|
|
|
|
## Contact |
|
|
- Ge Zhang: zhangge@01.ai |
|
|
- Wenhao Huang: huangwenhao@01.ai |
|
|
- Xinrun Du: duxinrun@01.ai |
|
|
- Bei Chen: chenbei@01.ai |
|
|
- Jie Fu: jiefu@ust.hk |
|
|
|
|
|
## Citation |
|
|
|
|
|
**BibTeX:** |
|
|
```bibtex |
|
|
@article{zhang2024cmmmu, |
|
|
title={CMMMU: A Chinese Massive Multi-discipline Multimodal Understanding Benchmark}, |
|
|
  author={Zhang, Ge and Du, Xinrun and Chen, Bei and Liang, Yiming and Luo, Tongxu and Zheng, Tianyu and Zhu, Kang and Cheng, Yuyang and Xu, Chunpu and Guo, Shuyue and Zhang, Haoran and Qu, Xingwei and Wang, Junjie and Yuan, Ruibin and Li, Yizhi and Wang, Zekun and Liu, Yudong and Tsai, Yu-Hsuan and Zhang, Fengji and Lin, Chenghua and Huang, Wenhao and Fu, Jie},
|
|
  journal={arXiv preprint arXiv:2401.11944},
|
|
year={2024}, |
|
|
} |
|
|
``` |
|
|
|