# Source: RoboCOIN-DataManager-assets / dataset_info / AIRBOT_MMK2_open_notebook.yaml
# Uploaded by RogersPyke — "Update RoboCOIN assets" (commit a687d6d, verified)
---
dataset_name: open_notebook
dataset_uuid: f7156fc6-87bb-4662-b1ff-9cf8de2c4742
scene_type:
- home
atomic_actions:
- grasp
- open
- hold
end_effector_type:
- five_finger_hand
operation_platform_height: 77.2
objects:
- object_name: table
level1: furniture
level2: table
level3: null
level4: null
level5: null
- object_name: notebook
level1: office_supplies
level2: notebook
level3: null
level4: null
level5: null
path: AIRBOT_MMK2_open_notebook
video_url: ./assets/videos/AIRBOT_MMK2_open_notebook.mp4
thumbnail_url: ./assets/thumbnails/AIRBOT_MMK2_open_notebook.jpg
license: apache-2.0
language:
- en
- zh
task_categories:
- robotics
tags:
- RoboCOIN
- LeRobot
frame_range: 10K-100K
dataset_size: 419.3MB
configs:
- config_name: default
data_files: data/*/*.parquet
authors:
contributed_by:
- name: RoboCOIN
url: https://flagopen.github.io/RoboCOIN/
affiliation: RoboCOIN Team
annotated_by:
- name: RoboCOIN
url: https://flagopen.github.io/RoboCOIN/
affiliation: RoboCOIN Team
dataset_description: This dataset uses an extended format based on LeRobot and is
fully compatible with LeRobot.
homepage: https://flagopen.github.io/RoboCOIN/
paper: https://arxiv.org/abs/2511.17441
repository: https://github.com/FlagOpen/RoboCOIN
project_page: https://flagopen.github.io/RoboCOIN/
issues_url: https://github.com/FlagOpen/RoboCOIN/issues
robot_type: AIRBOT_MMK2
codebase_version: v2.1
statistics:
total_episodes: 49
total_frames: 11348
total_tasks: 1
total_videos: 196
total_chunks: 1
chunks_size: 1000
fps: 30
splits:
train: "0:48"  # quoted: unquoted digits-and-colons parse as sexagesimal int in YAML 1.1. NOTE(review): total_episodes is 49 — confirm whether this should be "0:49" (exclusive end)
data_path: data/chunk-{episode_chunk:03d}/episode_{episode_index:06d}.parquet
video_path: videos/chunk-{episode_chunk:03d}/{video_key}/episode_{episode_index:06d}.mp4
features:
observation.images.cam_high_rgb:
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
info:
video.height: 480
video.width: 640
video.codec: av1
video.pix_fmt: yuv420p
video.is_depth_map: false
video.fps: 30
video.channels: 3
has_audio: false
observation.images.cam_left_wrist_rgb:
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
info:
video.height: 480
video.width: 640
video.codec: av1
video.pix_fmt: yuv420p
video.is_depth_map: false
video.fps: 30
video.channels: 3
has_audio: false
observation.images.cam_right_wrist_rgb:
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
info:
video.height: 480
video.width: 640
video.codec: av1
video.pix_fmt: yuv420p
video.is_depth_map: false
video.fps: 30
video.channels: 3
has_audio: false
observation.images.cam_third_view:
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
info:
video.height: 480
video.width: 640
video.codec: av1
video.pix_fmt: yuv420p
video.is_depth_map: false
video.fps: 30
video.channels: 3
has_audio: false
observation.state:
dtype: float32
shape:
- 36
names:
- left_arm_joint_1_rad
- left_arm_joint_2_rad
- left_arm_joint_3_rad
- left_arm_joint_4_rad
- left_arm_joint_5_rad
- left_arm_joint_6_rad
- right_arm_joint_1_rad
- right_arm_joint_2_rad
- right_arm_joint_3_rad
- right_arm_joint_4_rad
- right_arm_joint_5_rad
- right_arm_joint_6_rad
- left_hand_joint_1_rad
- left_hand_joint_2_rad
- left_hand_joint_3_rad
- left_hand_joint_4_rad
- left_hand_joint_5_rad
- left_hand_joint_6_rad
- left_hand_joint_7_rad
- left_hand_joint_8_rad
- left_hand_joint_9_rad
- left_hand_joint_10_rad
- left_hand_joint_11_rad
- left_hand_joint_12_rad
- right_hand_joint_1_rad
- right_hand_joint_2_rad
- right_hand_joint_3_rad
- right_hand_joint_4_rad
- right_hand_joint_5_rad
- right_hand_joint_6_rad
- right_hand_joint_7_rad
- right_hand_joint_8_rad
- right_hand_joint_9_rad
- right_hand_joint_10_rad
- right_hand_joint_11_rad
- right_hand_joint_12_rad
action:
dtype: float32
shape:
- 36
names:
- left_arm_joint_1_rad
- left_arm_joint_2_rad
- left_arm_joint_3_rad
- left_arm_joint_4_rad
- left_arm_joint_5_rad
- left_arm_joint_6_rad
- right_arm_joint_1_rad
- right_arm_joint_2_rad
- right_arm_joint_3_rad
- right_arm_joint_4_rad
- right_arm_joint_5_rad
- right_arm_joint_6_rad
- left_hand_joint_1_rad
- left_hand_joint_2_rad
- left_hand_joint_3_rad
- left_hand_joint_4_rad
- left_hand_joint_5_rad
- left_hand_joint_6_rad
- left_hand_joint_7_rad
- left_hand_joint_8_rad
- left_hand_joint_9_rad
- left_hand_joint_10_rad
- left_hand_joint_11_rad
- left_hand_joint_12_rad
- right_hand_joint_1_rad
- right_hand_joint_2_rad
- right_hand_joint_3_rad
- right_hand_joint_4_rad
- right_hand_joint_5_rad
- right_hand_joint_6_rad
- right_hand_joint_7_rad
- right_hand_joint_8_rad
- right_hand_joint_9_rad
- right_hand_joint_10_rad
- right_hand_joint_11_rad
- right_hand_joint_12_rad
timestamp:
dtype: float32
shape:
- 1
names: null
frame_index:
dtype: int64
shape:
- 1
names: null
episode_index:
dtype: int64
shape:
- 1
names: null
index:
dtype: int64
shape:
- 1
names: null
task_index:
dtype: int64
shape:
- 1
names: null
subtask_annotation:
names: null
dtype: int32
shape:
- 5
scene_annotation:
names: null
dtype: int32
shape:
- 1
eef_sim_pose_state:
names:
- left_eef_pos_x
- left_eef_pos_y
- left_eef_pos_z
- left_eef_ori_x
- left_eef_ori_y
- left_eef_ori_z
- right_eef_pos_x
- right_eef_pos_y
- right_eef_pos_z
- right_eef_ori_x
- right_eef_ori_y
- right_eef_ori_z
dtype: float32
shape:
- 12
eef_sim_pose_action:
names:
- left_eef_pos_x
- left_eef_pos_y
- left_eef_pos_z
- left_eef_ori_x
- left_eef_ori_y
- left_eef_ori_z
- right_eef_pos_x
- right_eef_pos_y
- right_eef_pos_z
- right_eef_ori_x
- right_eef_ori_y
- right_eef_ori_z
dtype: float32
shape:
- 12
eef_direction_state:
names:
- left_eef_direction
- right_eef_direction
dtype: int32
shape:
- 2
eef_direction_action:
names:
- left_eef_direction
- right_eef_direction
dtype: int32
shape:
- 2
eef_velocity_state:
names:
- left_eef_velocity
- right_eef_velocity
dtype: int32
shape:
- 2
eef_velocity_action:
names:
- left_eef_velocity
- right_eef_velocity
dtype: int32
shape:
- 2
eef_acc_mag_state:
names:
- left_eef_acc_mag
- right_eef_acc_mag
dtype: int32
shape:
- 2
eef_acc_mag_action:
names:
- left_eef_acc_mag
- right_eef_acc_mag
dtype: int32
shape:
- 2
tasks: Press the bottom corner of the notebook with one hand and open it with the
other.
sub_tasks:
- Release the laptop with the right gripper
- Press the laptop with the right gripper
- Open the laptop with the left gripper
- Static
- End
- Grasp the laptop with the left gripper
- 'null'
annotations:
subtask_annotation: auto_generated
scene_annotation: auto_generated
eef_direction: auto_generated
eef_velocity: auto_generated
eef_acc_mag: auto_generated
gripper_mode: auto_generated
gripper_activity: auto_generated
cameras:
- key: observation.images.cam_high_rgb
name: cam_high_rgb
dtype: video
shape:
- 480
- 640
- 3
resolution:
- 480
- 640
fps: 30
is_depth: false
- key: observation.images.cam_left_wrist_rgb
name: cam_left_wrist_rgb
dtype: video
shape:
- 480
- 640
- 3
resolution:
- 480
- 640
fps: 30
is_depth: false
- key: observation.images.cam_right_wrist_rgb
name: cam_right_wrist_rgb
dtype: video
shape:
- 480
- 640
- 3
resolution:
- 480
- 640
fps: 30
is_depth: false
- key: observation.images.cam_third_view
name: cam_third_view
dtype: video
shape:
- 480
- 640
- 3
resolution:
- 480
- 640
fps: 30
is_depth: false
observation_space:
images:
- key: observation.images.cam_high_rgb
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
- key: observation.images.cam_left_wrist_rgb
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
- key: observation.images.cam_right_wrist_rgb
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
- key: observation.images.cam_third_view
dtype: video
shape:
- 480
- 640
- 3
names:
- height
- width
- channels
state:
dtype: float32
shape:
- 36
names:
- left_arm_joint_1_rad
- left_arm_joint_2_rad
- left_arm_joint_3_rad
- left_arm_joint_4_rad
- left_arm_joint_5_rad
- left_arm_joint_6_rad
- right_arm_joint_1_rad
- right_arm_joint_2_rad
- right_arm_joint_3_rad
- right_arm_joint_4_rad
- right_arm_joint_5_rad
- right_arm_joint_6_rad
- left_hand_joint_1_rad
- left_hand_joint_2_rad
- left_hand_joint_3_rad
- left_hand_joint_4_rad
- left_hand_joint_5_rad
- left_hand_joint_6_rad
- left_hand_joint_7_rad
- left_hand_joint_8_rad
- left_hand_joint_9_rad
- left_hand_joint_10_rad
- left_hand_joint_11_rad
- left_hand_joint_12_rad
- right_hand_joint_1_rad
- right_hand_joint_2_rad
- right_hand_joint_3_rad
- right_hand_joint_4_rad
- right_hand_joint_5_rad
- right_hand_joint_6_rad
- right_hand_joint_7_rad
- right_hand_joint_8_rad
- right_hand_joint_9_rad
- right_hand_joint_10_rad
- right_hand_joint_11_rad
- right_hand_joint_12_rad
action_space:
dtype: float32
shape:
- 36
names:
- left_arm_joint_1_rad
- left_arm_joint_2_rad
- left_arm_joint_3_rad
- left_arm_joint_4_rad
- left_arm_joint_5_rad
- left_arm_joint_6_rad
- right_arm_joint_1_rad
- right_arm_joint_2_rad
- right_arm_joint_3_rad
- right_arm_joint_4_rad
- right_arm_joint_5_rad
- right_arm_joint_6_rad
- left_hand_joint_1_rad
- left_hand_joint_2_rad
- left_hand_joint_3_rad
- left_hand_joint_4_rad
- left_hand_joint_5_rad
- left_hand_joint_6_rad
- left_hand_joint_7_rad
- left_hand_joint_8_rad
- left_hand_joint_9_rad
- left_hand_joint_10_rad
- left_hand_joint_11_rad
- left_hand_joint_12_rad
- right_hand_joint_1_rad
- right_hand_joint_2_rad
- right_hand_joint_3_rad
- right_hand_joint_4_rad
- right_hand_joint_5_rad
- right_hand_joint_6_rad
- right_hand_joint_7_rad
- right_hand_joint_8_rad
- right_hand_joint_9_rad
- right_hand_joint_10_rad
- right_hand_joint_11_rad
- right_hand_joint_12_rad
eef_sim_pose: auto_generated
gripper_open_scale: auto_generated
depth_enabled: false
data_schema: auto_generated
structure: "AIRBOT_MMK2_open_notebook_qced_hardlink/\nβ”œβ”€β”€ annotations/\nβ”‚ β”œβ”€β”€ eef_acc_mag_annotation.jsonl\n\
β”‚ β”œβ”€β”€ eef_direction_annotation.jsonl\nβ”‚ β”œβ”€β”€ eef_velocity_annotation.jsonl\n\
β”‚ β”œβ”€β”€ gripper_activity_annotation.jsonl\nβ”‚ β”œβ”€β”€ gripper_mode_annotation.jsonl\n\
β”‚ └── (...)\nβ”œβ”€β”€ data/\nβ”‚ └── chunk-000/\nβ”‚ β”œβ”€β”€ episode_000000.parquet\n\
β”‚ β”œβ”€β”€ episode_000001.parquet\nβ”‚ β”œβ”€β”€ episode_000002.parquet\nβ”‚ \
\ β”œβ”€β”€ episode_000003.parquet\nβ”‚ β”œβ”€β”€ episode_000004.parquet\nβ”‚ └── (...)\n\
β”œβ”€β”€ meta/\nβ”‚ β”œβ”€β”€ episodes.jsonl\nβ”‚ β”œβ”€β”€ episodes_stats.jsonl\nβ”‚ β”œβ”€β”€ info.json\n\
β”‚ └── tasks.jsonl\n└── videos/\n └── chunk-000/\n β”œβ”€β”€ observation.images.cam_high_rgb/\n\
\ β”‚ β”œβ”€β”€ episode_000000.mp4\n β”‚ β”œβ”€β”€ episode_000001.mp4\n \
\ β”‚ β”œβ”€β”€ episode_000002.mp4\n β”‚ β”œβ”€β”€ episode_000003.mp4\n β”‚ β”œβ”€β”€\
\ episode_000004.mp4\n β”‚ └── (...)\n β”œβ”€β”€ observation.images.cam_left_wrist_rgb/\n\
\ β”‚ β”œβ”€β”€ episode_000000.mp4\n β”‚ β”œβ”€β”€ episode_000001.mp4\n \
\ β”‚ β”œβ”€β”€ episode_000002.mp4\n β”‚ β”œβ”€β”€ episode_000003.mp4\n β”‚ β”œβ”€β”€\
\ episode_000004.mp4\n β”‚ └── (...)\n β”œβ”€β”€ observation.images.cam_right_wrist_rgb/\n\
\ β”‚ β”œβ”€β”€ episode_000000.mp4\n β”‚ β”œβ”€β”€ episode_000001.mp4\n \
\ β”‚ β”œβ”€β”€ episode_000002.mp4\n β”‚ β”œβ”€β”€ episode_000003.mp4\n β”‚ β”œβ”€β”€\
\ episode_000004.mp4\n β”‚ └── (...)\n └── observation.images.cam_third_view/\n\
\ β”œβ”€β”€ episode_000000.mp4\n β”œβ”€β”€ episode_000001.mp4\n \
\ β”œβ”€β”€ episode_000002.mp4\n β”œβ”€β”€ episode_000003.mp4\n β”œβ”€β”€\
\ episode_000004.mp4\n └── (...)"
contact_email: null
contact_info: For questions, issues, or feedback regarding this dataset, please contact
us.
support_info: For technical support, please open an issue on our GitHub repository.
license_details: Please refer to the LICENSE file for full license terms and conditions.
citation_bibtex: "@article{robocoin,\n title={RoboCOIN: An Open-Sourced Bimanual\
\ Robotic Data Collection for Integrated Manipulation},\n author={Shihan Wu,\
\ Xuecheng Liu, Shaoxuan Xie, Pengwei Wang, Xinghang Li, Bowen Yang, Zhe Li, Kai\
\ Zhu, Hongyu Wu, Yiheng Liu, Zhaoye Long, Yue Wang, Chong Liu, Dihan Wang, Ziqiang\
\ Ni, Xiang Yang, You Liu, Ruoxuan Feng, Runtian Xu, Lei Zhang, Denghang Huang,\
\ Chenghao Jin, Anlan Yin, Xinlong Wang, Zhenguo Sun, Junkai Zhao, Mengfei Du, Mingyu\
\ Cao, Xiansheng Chen, Hongyang Cheng, Xiaojie Zhang, Yankai Fu, Ning Chen, Cheng\
\ Chi, Sixiang Chen, Huaihai Lyu, Xiaoshuai Hao, Yequan Wang, Bo Lei, Dong Liu,\
\ Xi Yang, Yance Jiao, Tengfei Pan, Yunyan Zhang, Songjing Wang, Ziqian Zhang, Xu\
\ Liu, Ji Zhang, Caowei Meng, Zhizheng Zhang, Jiyang Gao, Song Wang, Xiaokun Leng,\
\ Zhiqiang Xie, Zhenzhen Zhou, Peng Huang, Wu Yang, Yandong Guo, Yichao Zhu, Suibing\
\ Zheng, Hao Cheng, Xinmin Ding, Yang Yue, Huanqian Wang, Chi Chen, Jingrui Pang,\
\ YuXi Qian, Haoran Geng, Lianli Gao, Haiyuan Li, Bin Fang, Gao Huang, Yaodong Yang,\
\ Hao Dong, He Wang, Hang Zhao, Yadong Mu, Di Hu, Hao Zhao, Tiejun Huang, Shanghang\
\ Zhang, Yonghua Lin, Zhongyuan Wang and Guocai Yao},\n journal={arXiv preprint\
\ arXiv:2511.17441},\n url = {https://arxiv.org/abs/2511.17441},\n year={2025}\n\
\ }"
additional_citations: 'If you use this dataset, please also consider citing:
- LeRobot Framework: https://github.com/huggingface/lerobot'
version_info: '## Version History
- v1.0.0 (2025-11): Initial release'
raw:
dataset_name: open_notebook
dataset_uuid: null
task_descriptions:
- press the bottom right corner of the notebook with your right hand and open the
notebook with your left hand.
scene_type:
- home
atomic_actions:
- grasp
- open
- hold
objects:
- object_name: table
level1: furniture
level2: table
level3: null
level4: null
level5: null
- object_name: notebook
level1: office_supplies
level2: notebook
level3: null
level4: null
level5: null
operation_platform_height: 77.2
device_model:
# NOTE(review): "aitbot" looks like a typo for "airbot" (robot_type above is AIRBOT_MMK2) — confirm before changing
- discover_robotics_aitbot_mmk2
# NOTE(review): contradicts top-level end_effector_type (five_finger_hand, and state/action carry 12 hand joints per arm) — confirm which is correct
end_effector_type: two_finger_gripper